Moving src/webrtc into src/.
In order to eliminate the WebRTC Subtree mirror in Chromium, WebRTC is moving the content of the src/webrtc directory up to the src/ directory. NOPRESUBMIT=true NOTREECHECKS=true NOTRY=true TBR=tommi@webrtc.org Bug: chromium:611808 Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38 Reviewed-on: https://webrtc-review.googlesource.com/1560 Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org> Reviewed-by: Henrik Kjellander <kjellander@webrtc.org> Cr-Commit-Position: refs/heads/master@{#19845}
This commit is contained in:
committed by
Commit Bot
parent
6674846b4a
commit
bb547203bf
@ -0,0 +1,172 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCAudioSession.h"
|
||||
#import "WebRTC/RTCAudioSessionConfiguration.h"
|
||||
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "RTCAudioSession+Private.h"
|
||||
|
||||
|
||||
@implementation RTCAudioSession (Configuration)

- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
                   error:(NSError **)outError {
  // Apply the configuration without touching the session's active state.
  return [self setConfiguration:configuration
                         active:NO
                shouldSetActive:NO
                          error:outError];
}

- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
                  active:(BOOL)active
                   error:(NSError **)outError {
  // Apply the configuration and also activate/deactivate the session.
  return [self setConfiguration:configuration
                         active:active
                shouldSetActive:YES
                          error:outError];
}

#pragma mark - Private

// Applies every property of |configuration| to the audio session, optionally
// changing the session's active state afterwards. Requires the configuration
// lock. On failure, |outError| (if provided) receives the last error seen.
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
                  active:(BOOL)active
         shouldSetActive:(BOOL)shouldSetActive
                   error:(NSError **)outError {
  NSParameterAssert(configuration);
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }

  // Keep a local error so each failure can be logged. We intentionally do not
  // return on the first failure; instead we try to apply as much of the
  // configuration as possible and report the last error encountered.
  NSError *lastError = nil;

  // Note: the category/mode comparisons below are pointer comparisons; the
  // AVAudioSession category/mode values are framework string constants.
  if (self.category != configuration.category ||
      self.categoryOptions != configuration.categoryOptions) {
    NSError *categoryError = nil;
    if (![self setCategory:configuration.category
               withOptions:configuration.categoryOptions
                     error:&categoryError]) {
      RTCLogError(@"Failed to set category: %@",
                  categoryError.localizedDescription);
      lastError = categoryError;
    } else {
      RTCLog(@"Set category to: %@", configuration.category);
    }
  }

  if (self.mode != configuration.mode) {
    NSError *modeError = nil;
    if (![self setMode:configuration.mode error:&modeError]) {
      RTCLogError(@"Failed to set mode: %@",
                  modeError.localizedDescription);
      lastError = modeError;
    } else {
      RTCLog(@"Set mode to: %@", configuration.mode);
    }
  }

  // Sometimes category options don't stick after setting mode.
  if (self.categoryOptions != configuration.categoryOptions) {
    NSError *optionsError = nil;
    if (![self setCategory:configuration.category
               withOptions:configuration.categoryOptions
                     error:&optionsError]) {
      RTCLogError(@"Failed to set category options: %@",
                  optionsError.localizedDescription);
      lastError = optionsError;
    } else {
      RTCLog(@"Set category options to: %ld",
             (long)configuration.categoryOptions);
    }
  }

  if (self.preferredSampleRate != configuration.sampleRate) {
    NSError *sampleRateError = nil;
    if (![self setPreferredSampleRate:configuration.sampleRate
                                error:&sampleRateError]) {
      RTCLogError(@"Failed to set preferred sample rate: %@",
                  sampleRateError.localizedDescription);
      lastError = sampleRateError;
    } else {
      RTCLog(@"Set preferred sample rate to: %.2f",
             configuration.sampleRate);
    }
  }

  if (self.preferredIOBufferDuration != configuration.ioBufferDuration) {
    NSError *bufferDurationError = nil;
    if (![self setPreferredIOBufferDuration:configuration.ioBufferDuration
                                      error:&bufferDurationError]) {
      RTCLogError(@"Failed to set preferred IO buffer duration: %@",
                  bufferDurationError.localizedDescription);
      lastError = bufferDurationError;
    } else {
      RTCLog(@"Set preferred IO buffer duration to: %f",
             configuration.ioBufferDuration);
    }
  }

  if (shouldSetActive) {
    NSError *activeError = nil;
    if (![self setActive:active error:&activeError]) {
      RTCLogError(@"Failed to setActive to %d: %@",
                  active, activeError.localizedDescription);
      lastError = activeError;
    }
  }

  if (self.isActive &&
      // TODO(tkchin): Figure out which category/mode numChannels is valid for.
      [self.mode isEqualToString:AVAudioSessionModeVoiceChat]) {
    // Try to set the preferred number of hardware audio channels. These calls
    // must be done after setting the audio session’s category and mode and
    // activating the session.
    NSInteger inputNumberOfChannels = configuration.inputNumberOfChannels;
    if (self.inputNumberOfChannels != inputNumberOfChannels) {
      NSError *inputChannelsError = nil;
      if (![self setPreferredInputNumberOfChannels:inputNumberOfChannels
                                             error:&inputChannelsError]) {
        RTCLogError(@"Failed to set preferred input number of channels: %@",
                    inputChannelsError.localizedDescription);
        lastError = inputChannelsError;
      } else {
        RTCLog(@"Set input number of channels to: %ld",
               (long)inputNumberOfChannels);
      }
    }
    NSInteger outputNumberOfChannels = configuration.outputNumberOfChannels;
    if (self.outputNumberOfChannels != outputNumberOfChannels) {
      NSError *outputChannelsError = nil;
      if (![self setPreferredOutputNumberOfChannels:outputNumberOfChannels
                                              error:&outputChannelsError]) {
        RTCLogError(@"Failed to set preferred output number of channels: %@",
                    outputChannelsError.localizedDescription);
        lastError = outputChannelsError;
      } else {
        RTCLog(@"Set output number of channels to: %ld",
               (long)outputNumberOfChannels);
      }
    }
  }

  if (outError) {
    *outError = lastError;
  }

  return lastError == nil;
}

@end
|
||||
97
sdk/objc/Framework/Classes/Audio/RTCAudioSession+Private.h
Normal file
97
sdk/objc/Framework/Classes/Audio/RTCAudioSession+Private.h
Normal file
@ -0,0 +1,97 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAudioSession.h"
|
||||
|
||||
#include <vector>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

@class RTCAudioSessionConfiguration;

// Private API on RTCAudioSession used by the audio device module and tests.
@interface RTCAudioSession ()

/** Number of times setActive:YES has succeeded without a balanced call to
 *  setActive:NO.
 */
@property(nonatomic, readonly) int activationCount;

/** The number of times |beginWebRTCSession| was called without a balanced
 *  call to |endWebRTCSession|.
 */
@property(nonatomic, readonly) int webRTCSessionCount;

/** Convenience BOOL that checks useManualAudio and isAudioEnabled. */
@property(readonly) BOOL canPlayOrRecord;

/** Tracks whether we have been sent an interruption event that hasn't been
 *  matched by either an interruption end event or a foreground event.
 */
@property(nonatomic, assign) BOOL isInterrupted;

/** Returns NO (populating |outError|) unless lockForConfiguration has been
 *  called by the current caller.
 */
- (BOOL)checkLock:(NSError **)outError;

/** Adds the delegate to the list of delegates, and places it at the front of
 *  the list. This delegate will be notified before other delegates of
 *  audio events.
 */
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate;

/** Signals RTCAudioSession that a WebRTC session is about to begin and
 *  audio configuration is needed. Will configure the audio session for WebRTC
 *  if not already configured and if configuration is not delayed.
 *  Successful calls must be balanced by a call to endWebRTCSession.
 */
- (BOOL)beginWebRTCSession:(NSError **)outError;

/** Signals RTCAudioSession that a WebRTC session is about to end and audio
 *  unconfiguration is needed. Will unconfigure the audio session for WebRTC
 *  if this is the last unmatched call and if configuration is not delayed.
 */
- (BOOL)endWebRTCSession:(NSError **)outError;

/** Configure the audio session for WebRTC. This call will fail if the session
 *  is already configured. On other failures, we will attempt to restore the
 *  previously used audio session configuration.
 *  |lockForConfiguration| must be called first.
 *  Successful calls to configureWebRTCSession must be matched by calls to
 *  |unconfigureWebRTCSession|.
 */
- (BOOL)configureWebRTCSession:(NSError **)outError;

/** Unconfigures the session for WebRTC. This will attempt to restore the
 *  audio session to the settings used before |configureWebRTCSession| was
 *  called.
 *  |lockForConfiguration| must be called first.
 */
- (BOOL)unconfigureWebRTCSession:(NSError **)outError;

/** Returns a configuration error with the given description. */
- (NSError *)configurationErrorWithDescription:(NSString *)description;

// Properties and methods for tests.
@property(nonatomic, readonly)
    std::vector<__weak id<RTCAudioSessionDelegate> > delegates;

- (void)notifyDidBeginInterruption;
- (void)notifyDidEndInterruptionWithShouldResumeSession:
    (BOOL)shouldResumeSession;
- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
    previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
- (void)notifyMediaServicesWereLost;
- (void)notifyMediaServicesWereReset;
- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
- (void)notifyDidStartPlayOrRecord;
- (void)notifyDidStopPlayOrRecord;
- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;

@end

NS_ASSUME_NONNULL_END
|
||||
925
sdk/objc/Framework/Classes/Audio/RTCAudioSession.mm
Normal file
925
sdk/objc/Framework/Classes/Audio/RTCAudioSession.mm
Normal file
@ -0,0 +1,925 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCAudioSession.h"
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
#include "webrtc/rtc_base/atomicops.h"
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
#include "webrtc/rtc_base/criticalsection.h"
|
||||
|
||||
#import "WebRTC/RTCAudioSessionConfiguration.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
#import "RTCAudioSession+Private.h"
|
||||
|
||||
|
||||
// Error domain and codes for NSErrors produced by RTCAudioSession.
NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession";
NSInteger const kRTCAudioSessionErrorLockRequired = -1;
NSInteger const kRTCAudioSessionErrorConfiguration = -2;
// KVO key path observed on AVAudioSession for output volume changes.
NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";

// This class needs to be thread-safe because it is accessed from many threads.
// TODO(tkchin): Consider more granular locking. We're not expecting a lot of
// lock contention so coarse locks should be fine for now.
|
||||
@implementation RTCAudioSession {
|
||||
rtc::CriticalSection _crit;
|
||||
AVAudioSession *_session;
|
||||
volatile int _activationCount;
|
||||
volatile int _lockRecursionCount;
|
||||
volatile int _webRTCSessionCount;
|
||||
BOOL _isActive;
|
||||
BOOL _useManualAudio;
|
||||
BOOL _isAudioEnabled;
|
||||
BOOL _canPlayOrRecord;
|
||||
BOOL _isInterrupted;
|
||||
}
|
||||
|
||||
@synthesize session = _session;
|
||||
@synthesize delegates = _delegates;
|
||||
|
||||
// Returns the process-wide singleton. RTCAudioSession wraps the single
// AVAudioSession instance, so one shared object is the only sensible shape.
+ (instancetype)sharedInstance {
  static dispatch_once_t once;
  static RTCAudioSession *instance = nil;
  dispatch_once(&once, ^{
    instance = [[self alloc] init];
  });
  return instance;
}
|
||||
|
||||
// Designated initializer. Subscribes to every AVAudioSession notification we
// translate into delegate callbacks, plus KVO on the output volume.
- (instancetype)init {
  if (self = [super init]) {
    _session = [AVAudioSession sharedInstance];

    NSNotificationCenter *notificationCenter =
        [NSNotificationCenter defaultCenter];
    [notificationCenter addObserver:self
                           selector:@selector(handleInterruptionNotification:)
                               name:AVAudioSessionInterruptionNotification
                             object:nil];
    [notificationCenter addObserver:self
                           selector:@selector(handleRouteChangeNotification:)
                               name:AVAudioSessionRouteChangeNotification
                             object:nil];
    [notificationCenter addObserver:self
                           selector:@selector(handleMediaServicesWereLost:)
                               name:AVAudioSessionMediaServicesWereLostNotification
                             object:nil];
    [notificationCenter addObserver:self
                           selector:@selector(handleMediaServicesWereReset:)
                               name:AVAudioSessionMediaServicesWereResetNotification
                             object:nil];
    // Posted on the main thread when the primary audio from other applications
    // starts and stops. Foreground applications may use this notification as a
    // hint to enable or disable audio that is secondary.
    [notificationCenter addObserver:self
                           selector:@selector(handleSilenceSecondaryAudioHintNotification:)
                               name:AVAudioSessionSilenceSecondaryAudioHintNotification
                             object:nil];
    // Also track foreground event in order to deal with interruption ended
    // situation.
    [notificationCenter addObserver:self
                           selector:@selector(handleApplicationDidBecomeActive:)
                               name:UIApplicationDidBecomeActiveNotification
                             object:nil];
    // Observe output volume via KVO; AVAudioSession posts no notification
    // for volume changes.
    [_session addObserver:self
               forKeyPath:kRTCAudioSessionOutputVolumeSelector
                  options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                  context:nil];

    RTCLog(@"RTCAudioSession (%p): init.", self);
  }
  return self;
}
|
||||
|
||||
// Balances the observer registrations made in init. The KVO observer must be
// removed explicitly, otherwise AVAudioSession would message a dead object.
- (void)dealloc {
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  [_session removeObserver:self
                forKeyPath:kRTCAudioSessionOutputVolumeSelector
                   context:nil];
  RTCLog(@"RTCAudioSession (%p): dealloc.", self);
}
|
||||
|
||||
// Debug description: a snapshot of the session's observable state.
// Fix: outputNumberOfChannels / inputNumberOfChannels are NSInteger but were
// passed to %ld without a (long) cast — undefined behavior on platforms where
// NSInteger is int. categoryOptions was already cast; now all three are.
- (NSString *)description {
  NSString *format =
      @"RTCAudioSession: {\n"
       "  category: %@\n"
       "  categoryOptions: %ld\n"
       "  mode: %@\n"
       "  isActive: %d\n"
       "  sampleRate: %.2f\n"
       "  IOBufferDuration: %f\n"
       "  outputNumberOfChannels: %ld\n"
       "  inputNumberOfChannels: %ld\n"
       "  outputLatency: %f\n"
       "  inputLatency: %f\n"
       "  outputVolume: %f\n"
       "}";
  NSString *description = [NSString stringWithFormat:format,
      self.category, (long)self.categoryOptions, self.mode,
      self.isActive, self.sampleRate, self.IOBufferDuration,
      (long)self.outputNumberOfChannels, (long)self.inputNumberOfChannels,
      self.outputLatency, self.inputLatency, self.outputVolume];
  return description;
}
|
||||
|
||||
// The following accessors guard their ivars with @synchronized(self) because
// the session is read and written from multiple threads.

- (void)setIsActive:(BOOL)isActive {
  @synchronized(self) {
    _isActive = isActive;
  }
}

- (BOOL)isActive {
  @synchronized(self) {
    return _isActive;
  }
}

// True while at least one lockForConfiguration is outstanding.
- (BOOL)isLocked {
  return _lockRecursionCount > 0;
}

- (void)setUseManualAudio:(BOOL)useManualAudio {
  @synchronized(self) {
    if (_useManualAudio == useManualAudio) {
      return;  // No change; avoid a spurious canPlayOrRecord update.
    }
    _useManualAudio = useManualAudio;
  }
  // Deliberately outside the @synchronized block: it may notify delegates.
  [self updateCanPlayOrRecord];
}

- (BOOL)useManualAudio {
  @synchronized(self) {
    return _useManualAudio;
  }
}

- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
  @synchronized(self) {
    if (_isAudioEnabled == isAudioEnabled) {
      return;  // No change; avoid a spurious canPlayOrRecord update.
    }
    _isAudioEnabled = isAudioEnabled;
  }
  // Deliberately outside the @synchronized block: it may notify delegates.
  [self updateCanPlayOrRecord];
}

- (BOOL)isAudioEnabled {
  @synchronized(self) {
    return _isAudioEnabled;
  }
}
|
||||
|
||||
// TODO(tkchin): Check for duplicates.
- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate {
  RTCLog(@"Adding delegate: (%p)", delegate);
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.push_back(delegate);
    // Opportunistically drop entries whose weak references have been zeroed.
    [self removeZeroedDelegates];
  }
}

- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate {
  RTCLog(@"Removing delegate: (%p)", delegate);
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    // Erase-remove idiom: partition matching entries to the tail, then erase.
    auto newEnd = std::remove(_delegates.begin(), _delegates.end(), delegate);
    _delegates.erase(newEnd, _delegates.end());
    [self removeZeroedDelegates];
  }
}
|
||||
|
||||
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wthread-safety-analysis"

// Acquires the (recursive) configuration lock. The recursion depth is
// mirrored in _lockRecursionCount so isLocked / checkLock: can be answered
// without touching the lock itself.
- (void)lockForConfiguration {
  _crit.Enter();
  rtc::AtomicOps::Increment(&_lockRecursionCount);
}

- (void)unlockForConfiguration {
  // Don't let threads other than the one that called lockForConfiguration
  // unlock.
  if (_crit.TryEnter()) {
    rtc::AtomicOps::Decrement(&_lockRecursionCount);
    // One unlock for the tryLock, and another one to actually unlock. If this
    // was called without anyone calling lock, we will hit an assertion.
    _crit.Leave();
    _crit.Leave();
  }
}

#pragma clang diagnostic pop
|
||||
|
||||
#pragma mark - AVAudioSession proxy methods

// Every getter below simply forwards to the wrapped AVAudioSession instance;
// RTCAudioSession adds no state of its own here.

- (NSString *)category {
  return self.session.category;
}

- (AVAudioSessionCategoryOptions)categoryOptions {
  return self.session.categoryOptions;
}

- (NSString *)mode {
  return self.session.mode;
}

- (BOOL)secondaryAudioShouldBeSilencedHint {
  return self.session.secondaryAudioShouldBeSilencedHint;
}

- (AVAudioSessionRouteDescription *)currentRoute {
  return self.session.currentRoute;
}

- (NSInteger)maximumInputNumberOfChannels {
  return self.session.maximumInputNumberOfChannels;
}

- (NSInteger)maximumOutputNumberOfChannels {
  return self.session.maximumOutputNumberOfChannels;
}

- (float)inputGain {
  return self.session.inputGain;
}

- (BOOL)inputGainSettable {
  return self.session.inputGainSettable;
}

- (BOOL)inputAvailable {
  return self.session.inputAvailable;
}

- (NSArray<AVAudioSessionDataSourceDescription *> *)inputDataSources {
  return self.session.inputDataSources;
}

- (AVAudioSessionDataSourceDescription *)inputDataSource {
  return self.session.inputDataSource;
}

- (NSArray<AVAudioSessionDataSourceDescription *> *)outputDataSources {
  return self.session.outputDataSources;
}

- (AVAudioSessionDataSourceDescription *)outputDataSource {
  return self.session.outputDataSource;
}

- (double)sampleRate {
  return self.session.sampleRate;
}

- (double)preferredSampleRate {
  return self.session.preferredSampleRate;
}

- (NSInteger)inputNumberOfChannels {
  return self.session.inputNumberOfChannels;
}

- (NSInteger)outputNumberOfChannels {
  return self.session.outputNumberOfChannels;
}

- (float)outputVolume {
  return self.session.outputVolume;
}

- (NSTimeInterval)inputLatency {
  return self.session.inputLatency;
}

- (NSTimeInterval)outputLatency {
  return self.session.outputLatency;
}

- (NSTimeInterval)IOBufferDuration {
  return self.session.IOBufferDuration;
}

- (NSTimeInterval)preferredIOBufferDuration {
  return self.session.preferredIOBufferDuration;
}
|
||||
|
||||
// TODO(tkchin): Simplify the amount of locking happening here. Likely that we
// can just do atomic increments / decrements.
// Activates or deactivates the underlying AVAudioSession using balanced
// reference counting: the session is only truly deactivated when the last
// unbalanced activation is released. Requires the configuration lock.
- (BOOL)setActive:(BOOL)active
            error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  int activationCount = _activationCount;
  if (!active && activationCount == 0) {
    RTCLogWarning(@"Attempting to deactivate without prior activation.");
  }
  BOOL success = YES;
  BOOL isActive = self.isActive;
  // Keep a local error so we can log it.
  NSError *error = nil;
  // Attempt to activate if we're not active.
  // Attempt to deactivate if we're active and it's the last unbalanced call.
  BOOL shouldSetActive =
      (active && !isActive) || (!active && isActive && activationCount == 1);
  if (shouldSetActive) {
    AVAudioSession *session = self.session;
    // AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation is used to
    // ensure that other audio sessions that were interrupted by our session
    // can return to their active state. It is recommended for VoIP apps to
    // use this option.
    AVAudioSessionSetActiveOptions options =
        active ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
    success = [session setActive:active
                     withOptions:options
                           error:&error];
    if (outError) {
      *outError = error;
    }
  }
  if (success) {
    if (shouldSetActive) {
      self.isActive = active;
    }
    if (active) {
      [self incrementActivationCount];
    }
  } else {
    RTCLogError(@"Failed to setActive:%d. Error: %@",
                active, error.localizedDescription);
  }
  // Decrement activation count on deactivation whether or not it succeeded.
  if (!active) {
    [self decrementActivationCount];
  }
  RTCLog(@"Number of current activations: %d", _activationCount);
  return success;
}
|
||||
|
||||
// Each mutating proxy below requires the configuration lock (checkLock:) and
// then forwards directly to the wrapped AVAudioSession.

- (BOOL)setCategory:(NSString *)category
        withOptions:(AVAudioSessionCategoryOptions)options
              error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setCategory:category withOptions:options error:outError];
}

- (BOOL)setMode:(NSString *)mode error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setMode:mode error:outError];
}

- (BOOL)setInputGain:(float)gain error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setInputGain:gain error:outError];
}

- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredSampleRate:sampleRate error:outError];
}

- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
                               error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredIOBufferDuration:duration error:outError];
}

- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
                                    error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredInputNumberOfChannels:count error:outError];
}

- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
                                     error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredOutputNumberOfChannels:count error:outError];
}

- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
                          error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session overrideOutputAudioPort:portOverride error:outError];
}

- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
                    error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredInput:inPort error:outError];
}

- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
                     error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setInputDataSource:dataSource error:outError];
}

- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
                      error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setOutputDataSource:dataSource error:outError];
}
|
||||
|
||||
#pragma mark - Notifications

// Translates AVAudioSession interruption notifications into delegate
// callbacks and keeps isActive/isInterrupted in sync.
- (void)handleInterruptionNotification:(NSNotification *)notification {
  NSNumber *typeNumber =
      notification.userInfo[AVAudioSessionInterruptionTypeKey];
  AVAudioSessionInterruptionType type =
      (AVAudioSessionInterruptionType)typeNumber.unsignedIntegerValue;
  switch (type) {
    case AVAudioSessionInterruptionTypeBegan:
      RTCLog(@"Audio session interruption began.");
      // The system deactivates the session on interruption begin.
      self.isActive = NO;
      self.isInterrupted = YES;
      [self notifyDidBeginInterruption];
      break;
    case AVAudioSessionInterruptionTypeEnded: {
      RTCLog(@"Audio session interruption ended.");
      self.isInterrupted = NO;
      [self updateAudioSessionAfterEvent];
      NSNumber *optionsNumber =
          notification.userInfo[AVAudioSessionInterruptionOptionKey];
      AVAudioSessionInterruptionOptions options =
          optionsNumber.unsignedIntegerValue;
      BOOL shouldResume =
          options & AVAudioSessionInterruptionOptionShouldResume;
      [self notifyDidEndInterruptionWithShouldResumeSession:shouldResume];
      break;
    }
  }
}

// Logs the route change reason and forwards the event to delegates.
- (void)handleRouteChangeNotification:(NSNotification *)notification {
  // Get reason for current route change.
  NSNumber *reasonNumber =
      notification.userInfo[AVAudioSessionRouteChangeReasonKey];
  AVAudioSessionRouteChangeReason reason =
      (AVAudioSessionRouteChangeReason)reasonNumber.unsignedIntegerValue;
  RTCLog(@"Audio route changed:");
  switch (reason) {
    case AVAudioSessionRouteChangeReasonUnknown:
      RTCLog(@"Audio route changed: ReasonUnknown");
      break;
    case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
      RTCLog(@"Audio route changed: NewDeviceAvailable");
      break;
    case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
      RTCLog(@"Audio route changed: OldDeviceUnavailable");
      break;
    case AVAudioSessionRouteChangeReasonCategoryChange:
      RTCLog(@"Audio route changed: CategoryChange to :%@",
             self.session.category);
      break;
    case AVAudioSessionRouteChangeReasonOverride:
      RTCLog(@"Audio route changed: Override");
      break;
    case AVAudioSessionRouteChangeReasonWakeFromSleep:
      RTCLog(@"Audio route changed: WakeFromSleep");
      break;
    case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
      RTCLog(@"Audio route changed: NoSuitableRouteForCategory");
      break;
    case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
      RTCLog(@"Audio route changed: RouteConfigurationChange");
      break;
  }
  AVAudioSessionRouteDescription *previousRoute =
      notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
  // Log previous route configuration.
  RTCLog(@"Previous route: %@\nCurrent route:%@",
         previousRoute, self.session.currentRoute);
  [self notifyDidChangeRouteWithReason:reason previousRoute:previousRoute];
}

- (void)handleMediaServicesWereLost:(NSNotification *)notification {
  RTCLog(@"Media services were lost.");
  [self updateAudioSessionAfterEvent];
  [self notifyMediaServicesWereLost];
}

- (void)handleMediaServicesWereReset:(NSNotification *)notification {
  RTCLog(@"Media services were reset.");
  [self updateAudioSessionAfterEvent];
  [self notifyMediaServicesWereReset];
}

- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification *)notification {
  // TODO(henrika): just adding logs here for now until we know if we are ever
  // see this notification and might be affected by it or if further actions
  // are required.
  NSNumber *typeNumber =
      notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
  AVAudioSessionSilenceSecondaryAudioHintType type =
      (AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
  switch (type) {
    case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
      RTCLog(@"Another application's primary audio has started.");
      break;
    case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
      RTCLog(@"Another application's primary audio has stopped.");
      break;
  }
}

// Treats returning to the foreground as the end of any pending interruption.
// Fix: the original log literal concatenated "...interruption " with
// " end. ...", emitting a double space; the leading space is removed.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  RTCLog(@"Application became active after an interruption. Treating as interruption "
          "end. isInterrupted changed from %d to 0.", self.isInterrupted);
  if (self.isInterrupted) {
    self.isInterrupted = NO;
    [self updateAudioSessionAfterEvent];
  }
  // Always treat application becoming active as an interruption end event.
  [self notifyDidEndInterruptionWithShouldResumeSession:YES];
}
|
||||
|
||||
#pragma mark - Private

// Builds the NSError returned by checkLock: when a caller forgot to call
// lockForConfiguration first.
+ (NSError *)lockError {
  NSDictionary *userInfo = @{
    NSLocalizedDescriptionKey :
        @"Must call lockForConfiguration before calling this method."
  };
  return [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
                                    code:kRTCAudioSessionErrorLockRequired
                                userInfo:userInfo];
}
|
||||
|
||||
// Thread-safe snapshot of the registered delegates.
- (std::vector<__weak id<RTCAudioSessionDelegate> >)delegates {
  @synchronized(self) {
    // Returning by value hands the caller an independent copy; the copy is
    // made while the lock is held.
    std::vector<__weak id<RTCAudioSessionDelegate> > snapshot = _delegates;
    return snapshot;
  }
}
|
||||
|
||||
// Registers |delegate| at the front of the list so it is notified first.
// TODO(tkchin): check for duplicates.
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate {
  @synchronized(self) {
    _delegates.insert(_delegates.begin(), delegate);
  }
}
|
||||
|
||||
// Compacts the delegate list by dropping weak references that ARC has
// already zeroed, so dead entries do not accumulate.
- (void)removeZeroedDelegates {
  @synchronized(self) {
    auto firstDead =
        std::remove_if(_delegates.begin(), _delegates.end(),
                       [](id delegate) -> bool { return delegate == nil; });
    _delegates.erase(firstDead, _delegates.end());
  }
}
|
||||
|
||||
// Current number of balanced activate calls on the underlying session.
- (int)activationCount {
  return _activationCount;
}
|
||||
|
||||
// Atomically bumps the activation count and returns the new value.
- (int)incrementActivationCount {
  RTCLog(@"Incrementing activation count.");
  return rtc::AtomicOps::Increment(&_activationCount);
}
|
||||
|
||||
// Atomically lowers the activation count and returns the new value.
// NOTE(review): returns NSInteger while -incrementActivationCount returns
// int; the asymmetry is preserved because callers may rely on the signature.
- (NSInteger)decrementActivationCount {
  RTCLog(@"Decrementing activation count.");
  return rtc::AtomicOps::Decrement(&_activationCount);
}
|
||||
|
||||
// Number of WebRTC sessions currently holding this audio session.
- (int)webRTCSessionCount {
  return _webRTCSessionCount;
}
|
||||
|
||||
// Playback/recording is allowed unless manual audio mode is on and audio has
// not been explicitly enabled.
- (BOOL)canPlayOrRecord {
  return !self.useManualAudio || self.isAudioEnabled;
}
|
||||
|
||||
// Thread-safe read of the interruption flag.
- (BOOL)isInterrupted {
  @synchronized(self) {
    return _isInterrupted;
  }
}
|
||||
|
||||
// Thread-safe write of the interruption flag; no-ops on an unchanged value.
- (void)setIsInterrupted:(BOOL)isInterrupted {
  @synchronized(self) {
    if (_isInterrupted != isInterrupted) {
      _isInterrupted = isInterrupted;
    }
  }
}
|
||||
|
||||
// Verifies that the caller holds the configuration lock.
// Inspects the ivar instead of attempting to take the lock, so we never
// accidentally acquire it on behalf of a caller that forgot to.
// Returns NO and fills |outError| (if given) with +lockError on failure.
- (BOOL)checkLock:(NSError **)outError {
  if (self.isLocked) {
    return YES;
  }
  if (outError) {
    *outError = [RTCAudioSession lockError];
  }
  return NO;
}
|
||||
|
||||
// Registers the start of a WebRTC session. Requires the configuration lock.
- (BOOL)beginWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  rtc::AtomicOps::Increment(&_webRTCSessionCount);
  [self notifyDidStartPlayOrRecord];
  return YES;
}
|
||||
|
||||
// Registers the end of a WebRTC session. Requires the configuration lock.
- (BOOL)endWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  rtc::AtomicOps::Decrement(&_webRTCSessionCount);
  [self notifyDidStopPlayOrRecord];
  return YES;
}
|
||||
|
||||
// Applies the WebRTC audio configuration to the AVAudioSession and activates
// it. Requires lockForConfiguration to have been called first.
// Returns NO (and populates |outError| if non-NULL) on failure.
- (BOOL)configureWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  RTCLog(@"Configuring audio session for WebRTC.");

  // Configure the AVAudioSession and activate it.
  // Provide an error even if there isn't one so we can log it.
  NSError *error = nil;
  RTCAudioSessionConfiguration *webRTCConfig =
      [RTCAudioSessionConfiguration webRTCConfiguration];
  if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
    RTCLogError(@"Failed to set WebRTC audio configuration: %@",
                error.localizedDescription);
    // Do not call setActive:NO if setActive:YES failed.
    if (outError) {
      *outError = error;
    }
    return NO;
  }

  // Ensure that the device currently supports audio input.
  // TODO(tkchin): Figure out if this is really necessary.
  if (!self.inputAvailable) {
    RTCLogError(@"No audio input path is available!");
    // Roll back the activation performed above; pass nil since a failure to
    // deactivate is not reported to the caller here.
    [self unconfigureWebRTCSession:nil];
    if (outError) {
      *outError = [self configurationErrorWithDescription:@"No input path."];
    }
    return NO;
  }

  // It can happen (e.g. in combination with BT devices) that the attempt to set
  // the preferred sample rate for WebRTC (48kHz) fails. If so, make a new
  // configuration attempt using the sample rate that worked using the active
  // audio session. A typical case is that only 8 or 16kHz can be set, e.g. in
  // combination with BT headsets. Using this "trick" seems to avoid a state
  // where Core Audio asks for a different number of audio frames than what the
  // session's I/O buffer duration corresponds to.
  // TODO(henrika): this fix resolves bugs.webrtc.org/6004 but it has only been
  // tested on a limited set of iOS devices and BT devices.
  double sessionSampleRate = self.sampleRate;
  double preferredSampleRate = webRTCConfig.sampleRate;
  if (sessionSampleRate != preferredSampleRate) {
    RTCLogWarning(
        @"Current sample rate (%.2f) is not the preferred rate (%.2f)",
        sessionSampleRate, preferredSampleRate);
    if (![self setPreferredSampleRate:sessionSampleRate
                                error:&error]) {
      RTCLogError(@"Failed to set preferred sample rate: %@",
                  error.localizedDescription);
      if (outError) {
        *outError = error;
      }
      // NOTE: a sample-rate fallback failure is reported via |outError| but
      // is not treated as fatal — the method still returns YES below.
    }
  }

  return YES;
}
|
||||
|
||||
// Deactivates the audio session after WebRTC use. Requires the configuration
// lock. Deactivation errors are reported via |outError| but the method still
// returns YES, matching the original contract.
- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  RTCLog(@"Unconfiguring audio session for WebRTC.");
  [self setActive:NO error:outError];

  return YES;
}
|
||||
|
||||
// Builds a configuration-domain NSError carrying |description| as the
// localized description.
- (NSError *)configurationErrorWithDescription:(NSString *)description {
  return [[NSError alloc]
      initWithDomain:kRTCAudioSessionErrorDomain
                code:kRTCAudioSessionErrorConfiguration
            userInfo:@{NSLocalizedDescriptionKey : description}];
}
|
||||
|
||||
// Re-synchronizes the AVAudioSession active state with the activation count
// after an interruption or media-services event.
- (void)updateAudioSessionAfterEvent {
  BOOL shouldActivate = self.activationCount > 0;
  // When deactivating, let other apps know they can resume their audio.
  AVAudioSessionSetActiveOptions options =
      shouldActivate ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
  NSError *error = nil;
  BOOL succeeded = [self.session setActive:shouldActivate
                               withOptions:options
                                     error:&error];
  if (succeeded) {
    self.isActive = shouldActivate;
  } else {
    RTCLogError(@"Failed to set session active to %d. Error:%@",
                shouldActivate, error.localizedDescription);
  }
}
|
||||
|
||||
// Recomputes whether playback/recording is allowed and notifies delegates on
// a change.
- (void)updateCanPlayOrRecord {
  BOOL canPlayOrRecord = NO;
  @synchronized(self) {
    canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
    if (_canPlayOrRecord == canPlayOrRecord) {
      return;  // No state change; nothing to broadcast.
    }
    _canPlayOrRecord = canPlayOrRecord;
  }
  // Notify outside the lock to avoid holding it during delegate callbacks.
  [self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
}
|
||||
|
||||
// Called externally (e.g. by CallKit) when the audio session was activated
// on our behalf.
- (void)audioSessionDidActivate:(AVAudioSession *)session {
  if (_session != session) {
    RTCLogError(@"audioSessionDidActivate called on different AVAudioSession");
  }
  [self incrementActivationCount];
  self.isActive = YES;
}
|
||||
|
||||
// Called externally (e.g. by CallKit) when the audio session was deactivated
// on our behalf.
- (void)audioSessionDidDeactivate:(AVAudioSession *)session {
  if (_session != session) {
    RTCLogError(@"audioSessionDidDeactivate called on different AVAudioSession");
  }
  self.isActive = NO;
  [self decrementActivationCount];
}
|
||||
|
||||
// KVO callback. Changes on _session are forwarded as output-volume updates;
// anything else is handed to super per the KVO contract.
// NOTE(review): the key path is not inspected — this assumes outputVolume is
// the only key path observed on _session; confirm at the registration site.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
  if (object != _session) {
    [super observeValueForKeyPath:keyPath
                         ofObject:object
                           change:change
                          context:context];
    return;
  }
  NSNumber *newVolume = change[NSKeyValueChangeNewKey];
  RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);
  [self notifyDidChangeOutputVolume:newVolume.floatValue];
}
|
||||
|
||||
// Tells each delegate that implements the optional callback that an audio
// interruption began.
- (void)notifyDidBeginInterruption {
  SEL sel = @selector(audioSessionDidBeginInterruption:);
  for (auto delegate : self.delegates) {
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidBeginInterruption:self];
    }
  }
}
|
||||
|
||||
// Tells each delegate that implements the optional callback that an audio
// interruption ended, and whether the session should resume.
- (void)notifyDidEndInterruptionWithShouldResumeSession:
    (BOOL)shouldResumeSession {
  SEL sel = @selector(audioSessionDidEndInterruption:shouldResumeSession:);
  for (auto delegate : self.delegates) {
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidEndInterruption:self
                           shouldResumeSession:shouldResumeSession];
    }
  }
}
|
||||
|
||||
// Forwards an audio route change (with reason and previous route) to each
// delegate that implements the optional callback.
- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
    previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
  SEL sel = @selector(audioSessionDidChangeRoute:reason:previousRoute:);
  for (auto delegate : self.delegates) {
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidChangeRoute:self
                                    reason:reason
                             previousRoute:previousRoute];
    }
  }
}
|
||||
|
||||
// Tells each delegate that implements the optional callback that the media
// server was terminated.
- (void)notifyMediaServicesWereLost {
  SEL sel = @selector(audioSessionMediaServerTerminated:);
  for (auto delegate : self.delegates) {
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionMediaServerTerminated:self];
    }
  }
}
|
||||
|
||||
// Tells each delegate that implements the optional callback that the media
// server was reset.
- (void)notifyMediaServicesWereReset {
  SEL sel = @selector(audioSessionMediaServerReset:);
  for (auto delegate : self.delegates) {
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionMediaServerReset:self];
    }
  }
}
|
||||
|
||||
// Broadcasts a change of the canPlayOrRecord state to interested delegates.
- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
  SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
  for (auto delegate : self.delegates) {
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
    }
  }
}
|
||||
|
||||
// Tells interested delegates that audio playout/recording started.
- (void)notifyDidStartPlayOrRecord {
  SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
  for (auto delegate : self.delegates) {
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidStartPlayOrRecord:self];
    }
  }
}
|
||||
|
||||
// Tells interested delegates that audio playout/recording stopped.
- (void)notifyDidStopPlayOrRecord {
  SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
  for (auto delegate : self.delegates) {
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidStopPlayOrRecord:self];
    }
  }
}
|
||||
|
||||
// Broadcasts a system output-volume change to interested delegates.
- (void)notifyDidChangeOutputVolume:(float)volume {
  SEL sel = @selector(audioSession:didChangeOutputVolume:);
  for (auto delegate : self.delegates) {
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didChangeOutputVolume:volume];
    }
  }
}
|
||||
|
||||
// Reports the cumulative playout glitch count to interested delegates.
- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
  SEL sel = @selector(audioSession:didDetectPlayoutGlitch:);
  for (auto delegate : self.delegates) {
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didDetectPlayoutGlitch:totalNumberOfGlitches];
    }
  }
}
|
||||
|
||||
@end
|
||||
134
sdk/objc/Framework/Classes/Audio/RTCAudioSessionConfiguration.m
Normal file
134
sdk/objc/Framework/Classes/Audio/RTCAudioSessionConfiguration.m
Normal file
@ -0,0 +1,134 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCAudioSession.h"
|
||||
#import "WebRTC/RTCAudioSessionConfiguration.h"
|
||||
|
||||
#import "WebRTC/RTCDispatcher.h"
|
||||
#import "WebRTC/UIDevice+RTCDevice.h"
|
||||
|
||||
|
||||
// Try to use mono to save resources. Also avoids channel format conversion
|
||||
// in the I/O audio unit. Initial tests have shown that it is possible to use
|
||||
// mono natively for built-in microphones and for BT headsets but not for
|
||||
// wired headsets. Wired headsets only support stereo as native channel format
|
||||
// but it is a low cost operation to do a format conversion to mono in the
|
||||
// audio unit. Hence, we will not hit a RTC_CHECK in
|
||||
// VerifyAudioParametersForActiveAudioSession() for a mismatch between the
|
||||
// preferred number of channels and the actual number of channels.
|
||||
const int kRTCAudioSessionPreferredNumberOfChannels = 1;
|
||||
|
||||
// Preferred hardware sample rate (unit is in Hertz). The client sample rate
|
||||
// will be set to this value as well to avoid resampling the the audio unit's
|
||||
// format converter. Note that, some devices, e.g. BT headsets, only supports
|
||||
// 8000Hz as native sample rate.
|
||||
const double kRTCAudioSessionHighPerformanceSampleRate = 48000.0;
|
||||
|
||||
// A lower sample rate will be used for devices with only one core
|
||||
// (e.g. iPhone 4). The goal is to reduce the CPU load of the application.
|
||||
const double kRTCAudioSessionLowComplexitySampleRate = 16000.0;
|
||||
|
||||
// Use a hardware I/O buffer size (unit is in seconds) that matches the 10ms
|
||||
// size used by WebRTC. The exact actual size will differ between devices.
|
||||
// Example: using 48kHz on iPhone 6 results in a native buffer size of
|
||||
// ~10.6667ms or 512 audio frames per buffer. The FineAudioBuffer instance will
|
||||
// take care of any buffering required to convert between native buffers and
|
||||
// buffers used by WebRTC. It is beneficial for the performance if the native
|
||||
// size is as an even multiple of 10ms as possible since it results in "clean"
|
||||
// callback sequence without bursts of callbacks back to back.
|
||||
const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.02;
|
||||
|
||||
// Use a larger buffer size on devices with only one core (e.g. iPhone 4).
|
||||
// It will result in a lower CPU consumption at the cost of a larger latency.
|
||||
// The size of 60ms is based on instrumentation that shows a significant
|
||||
// reduction in CPU load compared with 10ms on low-end devices.
|
||||
// TODO(henrika): monitor this size and determine if it should be modified.
|
||||
const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
|
||||
|
||||
static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil;
|
||||
|
||||
@implementation RTCAudioSessionConfiguration

@synthesize category = _category;
@synthesize categoryOptions = _categoryOptions;
@synthesize mode = _mode;
@synthesize sampleRate = _sampleRate;
@synthesize ioBufferDuration = _ioBufferDuration;
@synthesize inputNumberOfChannels = _inputNumberOfChannels;
@synthesize outputNumberOfChannels = _outputNumberOfChannels;

// Builds the default WebRTC-oriented configuration, tuned to the device's
// CPU core count.
- (instancetype)init {
  if (self = [super init]) {
    // Use a category which supports simultaneous recording and playback.
    // By default, using this category implies that our app's audio is
    // nonmixable, hence activating the session will interrupt any other
    // audio sessions which are also nonmixable.
    _category = AVAudioSessionCategoryPlayAndRecord;
    _categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth;

    // Specify mode for two-way voice communication (e.g. VoIP).
    _mode = AVAudioSessionModeVoiceChat;

    // Set the session's sample rate or the hardware sample rate.
    // It is essential that we use the same sample rate as stream format
    // to ensure that the I/O unit does not have to do sample rate conversion.
    // Set the preferred audio I/O buffer duration, in seconds.
    NSUInteger processorCount = [NSProcessInfo processInfo].processorCount;
    // Use best sample rate and buffer duration if the CPU has more than one
    // core; fall back to low-complexity values on single-core hardware
    // (e.g. iPhone 4/4S) to reduce CPU load.
    if (processorCount > 1 && [UIDevice deviceType] != RTCDeviceTypeIPhone4S) {
      _sampleRate = kRTCAudioSessionHighPerformanceSampleRate;
      _ioBufferDuration = kRTCAudioSessionHighPerformanceIOBufferDuration;
    } else {
      _sampleRate = kRTCAudioSessionLowComplexitySampleRate;
      _ioBufferDuration = kRTCAudioSessionLowComplexityIOBufferDuration;
    }

    // We try to use mono in both directions to save resources and format
    // conversions in the audio unit. Some devices only support stereo;
    // e.g. wired headset on iPhone 6.
    // TODO(henrika): add support for stereo if needed.
    _inputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
    _outputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
  }
  return self;
}

+ (void)initialize {
  // +initialize runs once for this class and again for any subclass that does
  // not override it. Guard on the exact class so a subclass's first message
  // cannot re-run this and clobber a configuration installed via
  // +setWebRTCConfiguration:.
  if (self == [RTCAudioSessionConfiguration class]) {
    gWebRTCConfiguration = [[self alloc] init];
  }
}

// Snapshot of the values currently set on the shared RTCAudioSession.
+ (instancetype)currentConfiguration {
  RTCAudioSession *session = [RTCAudioSession sharedInstance];
  RTCAudioSessionConfiguration *config =
      [[RTCAudioSessionConfiguration alloc] init];
  config.category = session.category;
  config.categoryOptions = session.categoryOptions;
  config.mode = session.mode;
  config.sampleRate = session.sampleRate;
  config.ioBufferDuration = session.IOBufferDuration;
  config.inputNumberOfChannels = session.inputNumberOfChannels;
  config.outputNumberOfChannels = session.outputNumberOfChannels;
  return config;
}

// The configuration WebRTC will apply when it configures the audio session.
// Note: returns the shared instance itself, not a copy.
+ (instancetype)webRTCConfiguration {
  @synchronized(self) {
    return (RTCAudioSessionConfiguration *)gWebRTCConfiguration;
  }
}

// Replaces the configuration WebRTC will apply.
+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration {
  @synchronized(self) {
    gWebRTCConfiguration = configuration;
  }
}

@end
|
||||
26
sdk/objc/Framework/Classes/Common/NSString+StdString.h
Normal file
26
sdk/objc/Framework/Classes/Common/NSString+StdString.h
Normal file
@ -0,0 +1,26 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
#include <string>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
// Category bridging NSString <-> std::string using UTF-8 encoding.
@interface NSString (StdString)

// UTF-8 encoded std::string representation of the receiver.
@property(nonatomic, readonly) std::string stdString;

// Returns the UTF-8 std::string for |nsString|.
+ (std::string)stdStringForString:(NSString *)nsString;
// Returns an NSString decoded as UTF-8 from |stdString|. Constructed from
// data + length so embedded NUL characters are preserved.
+ (NSString *)stringForStdString:(const std::string&)stdString;

@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
33
sdk/objc/Framework/Classes/Common/NSString+StdString.mm
Normal file
33
sdk/objc/Framework/Classes/Common/NSString+StdString.mm
Normal file
@ -0,0 +1,33 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
|
||||
@implementation NSString (StdString)

// UTF-8 encoded std::string representation of the receiver.
- (std::string)stdString {
  return [NSString stdStringForString:self];
}

// Returns the UTF-8 std::string for |nsString|.
// Returns an empty string for nil input.
+ (std::string)stdStringForString:(NSString *)nsString {
  NSData *charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
  if (!charData) {
    // nil input (messaging nil returns nil). Constructing
    // std::string(nullptr, 0) is undefined behavior, so return an empty
    // string explicitly instead.
    return std::string();
  }
  return std::string(reinterpret_cast<const char *>(charData.bytes),
                     charData.length);
}

// Returns an NSString decoded as UTF-8 from |stdString|.
+ (NSString *)stringForStdString:(const std::string&)stdString {
  // std::string may contain null termination character so we construct
  // using length.
  return [[NSString alloc] initWithBytes:stdString.data()
                                  length:stdString.length()
                                encoding:NSUTF8StringEncoding];
}

@end
|
||||
17
sdk/objc/Framework/Classes/Common/RTCDispatcher+Private.h
Normal file
17
sdk/objc/Framework/Classes/Common/RTCDispatcher+Private.h
Normal file
@ -0,0 +1,17 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCDispatcher.h"
|
||||
|
||||
// Private additions to RTCDispatcher that expose the backing queues.
@interface RTCDispatcher ()

// Returns the dispatch queue that backs |dispatchType|.
+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType;

@end
|
||||
61
sdk/objc/Framework/Classes/Common/RTCDispatcher.m
Normal file
61
sdk/objc/Framework/Classes/Common/RTCDispatcher.m
Normal file
@ -0,0 +1,61 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCDispatcher+Private.h"
|
||||
|
||||
static dispatch_queue_t kAudioSessionQueue = nil;
|
||||
static dispatch_queue_t kCaptureSessionQueue = nil;
|
||||
|
||||
@implementation RTCDispatcher

// Creates the shared serial queues exactly once, regardless of how many
// times +initialize is triggered (e.g. via subclasses).
+ (void)initialize {
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    kAudioSessionQueue =
        dispatch_queue_create("org.webrtc.RTCDispatcherAudioSession",
                              DISPATCH_QUEUE_SERIAL);
    kCaptureSessionQueue =
        dispatch_queue_create("org.webrtc.RTCDispatcherCaptureSession",
                              DISPATCH_QUEUE_SERIAL);
  });
}

// Runs |block| asynchronously on the queue selected by |dispatchType|.
+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
                      block:(dispatch_block_t)block {
  dispatch_async([self dispatchQueueForType:dispatchType], block);
}

// YES when the caller is already executing on the queue for |dispatchType|.
// Compares queue labels because our queues all carry unique labels.
+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType {
  dispatch_queue_t targetQueue = [self dispatchQueueForType:dispatchType];
  const char* targetLabel = dispatch_queue_get_label(targetQueue);
  const char* currentLabel = dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);

  NSAssert(strlen(targetLabel) > 0, @"Label is required for the target queue.");
  NSAssert(strlen(currentLabel) > 0, @"Label is required for the current queue.");

  return strcmp(targetLabel, currentLabel) == 0;
}

#pragma mark - Private

// Maps a queue type to the concrete dispatch queue.
+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType {
  switch (dispatchType) {
    case RTCDispatcherTypeMain:
      return dispatch_get_main_queue();
    case RTCDispatcherTypeCaptureSession:
      return kCaptureSessionQueue;
    case RTCDispatcherTypeAudioSession:
      return kAudioSessionQueue;
  }
}

@end
|
||||
|
||||
69
sdk/objc/Framework/Classes/Common/RTCFieldTrials.mm
Normal file
69
sdk/objc/Framework/Classes/Common/RTCFieldTrials.mm
Normal file
@ -0,0 +1,69 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCFieldTrials.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
// Adding 'nogncheck' to disable the gn include headers check.
|
||||
// We don't want to depend on 'system_wrappers:field_trial_default' because
|
||||
// clients should be able to provide their own implementation.
|
||||
#include "webrtc/system_wrappers/include/field_trial_default.h" // nogncheck
|
||||
|
||||
NSString * const kRTCFieldTrialAudioSendSideBweKey = @"WebRTC-Audio-SendSideBwe";
|
||||
NSString * const kRTCFieldTrialSendSideBweWithOverheadKey = @"WebRTC-SendSideBwe-WithOverhead";
|
||||
NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised";
|
||||
NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03";
|
||||
NSString * const kRTCFieldTrialImprovedBitrateEstimateKey = @"WebRTC-ImprovedBitrateEstimate";
|
||||
NSString * const kRTCFieldTrialMedianSlopeFilterKey = @"WebRTC-BweMedianSlopeFilter";
|
||||
NSString * const kRTCFieldTrialTrendlineFilterKey = @"WebRTC-BweTrendlineFilter";
|
||||
NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile";
|
||||
NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey =
|
||||
@"WebRTC-Audio-MinimizeResamplingOnMobile";
|
||||
NSString * const kRTCFieldTrialEnabledValue = @"Enabled";
|
||||
|
||||
static std::unique_ptr<char[]> gFieldTrialInitString;
|
||||
|
||||
// Builds the field-trial value string for the median-slope filter trial,
// e.g. "Enabled-20,0.001000".
NSString *RTCFieldTrialMedianSlopeFilterValue(
    size_t windowSize, double thresholdGain) {
  return [NSString stringWithFormat:@"Enabled-%zu,%lf",
                                    windowSize, thresholdGain];
}
|
||||
|
||||
// Builds the field-trial value string for the trendline filter trial,
// e.g. "Enabled-20,0.900000,0.001000".
NSString *RTCFieldTrialTrendlineFilterValue(
    size_t windowSize, double smoothingCoeff, double thresholdGain) {
  return [NSString stringWithFormat:@"Enabled-%zu,%lf,%lf",
                                    windowSize, smoothingCoeff, thresholdGain];
}
|
||||
|
||||
// Converts |fieldTrials| into the native "key/value/key/value/" init string
// and hands it to the WebRTC field-trial machinery.
void RTCInitFieldTrialDictionary(NSDictionary<NSString *, NSString *> *fieldTrials) {
  if (!fieldTrials) {
    RTCLogWarning(@"No fieldTrials provided.");
    return;
  }
  // Assemble the keys and values into the field trial string.
  // We don't perform any extra format checking. That should be done by the underlying WebRTC calls.
  NSMutableString *fieldTrialInitString = [NSMutableString string];
  for (NSString *key in fieldTrials) {
    [fieldTrialInitString
        appendString:[NSString stringWithFormat:@"%@/%@/", key, fieldTrials[key]]];
  }
  size_t len = fieldTrialInitString.length + 1;
  // The native layer keeps a raw pointer to this buffer, so it must stay
  // alive for the lifetime of the process (hence the file-static holder).
  gFieldTrialInitString.reset(new char[len]);
  if (![fieldTrialInitString getCString:gFieldTrialInitString.get()
                              maxLength:len
                               encoding:NSUTF8StringEncoding]) {
    RTCLogError(@"Failed to convert field trial string.");
    return;
  }
  webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString.get());
}
|
||||
176
sdk/objc/Framework/Classes/Common/RTCFileLogger.mm
Normal file
176
sdk/objc/Framework/Classes/Common/RTCFileLogger.mm
Normal file
@ -0,0 +1,176 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCFileLogger.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
#include "webrtc/rtc_base/filerotatingstream.h"
|
||||
#include "webrtc/rtc_base/logging.h"
|
||||
#include "webrtc/rtc_base/logsinks.h"
|
||||
|
||||
NSString *const kDefaultLogDirName = @"webrtc_logs";
|
||||
NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
|
||||
const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
|
||||
|
||||
@implementation RTCFileLogger {
  BOOL _hasStarted;                                     // YES between -start and -stop.
  NSString *_dirPath;                                   // Directory holding the rotated log files.
  NSUInteger _maxFileSize;                              // Total byte budget across rotated files.
  std::unique_ptr<rtc::FileRotatingLogSink> _logSink;   // Native sink; non-null only while started.
}

@synthesize severity = _severity;
@synthesize rotationType = _rotationType;
@synthesize shouldDisableBuffering = _shouldDisableBuffering;

// Designated convenience chain: default location is
// <Documents>/webrtc_logs with the default size cap.
- (instancetype)init {
  NSArray *paths = NSSearchPathForDirectoriesInDomains(
      NSDocumentDirectory, NSUserDomainMask, YES);
  NSString *documentsDirPath = [paths firstObject];
  NSString *defaultDirPath =
      [documentsDirPath stringByAppendingPathComponent:kDefaultLogDirName];
  return [self initWithDirPath:defaultDirPath
                   maxFileSize:kDefaultMaxFileSize];
}

// Convenience initializer; defaults to call-session rotation.
- (instancetype)initWithDirPath:(NSString *)dirPath
                    maxFileSize:(NSUInteger)maxFileSize {
  return [self initWithDirPath:dirPath
                   maxFileSize:maxFileSize
                  rotationType:RTCFileLoggerTypeCall];
}

// Designated initializer. Ensures |dirPath| exists as a directory, creating
// it (non-recursively) if needed. Returns nil when the path is occupied by a
// non-directory or the directory cannot be created.
- (instancetype)initWithDirPath:(NSString *)dirPath
                    maxFileSize:(NSUInteger)maxFileSize
                   rotationType:(RTCFileLoggerRotationType)rotationType {
  NSParameterAssert(dirPath.length);
  NSParameterAssert(maxFileSize);
  if (self = [super init]) {
    BOOL isDir = NO;
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:dirPath isDirectory:&isDir]) {
      if (!isDir) {
        // Bail if something already exists there.
        return nil;
      }
    } else {
      if (![fileManager createDirectoryAtPath:dirPath
                  withIntermediateDirectories:NO
                                   attributes:nil
                                        error:nil]) {
        // Bail if we failed to create a directory.
        return nil;
      }
    }
    _dirPath = dirPath;
    _maxFileSize = maxFileSize;
    _severity = RTCFileLoggerSeverityInfo;
  }
  return self;
}

// Stop on dealloc so the sink is removed from the global log stream before
// it is destroyed.
- (void)dealloc {
  [self stop];
}

// Creates the native rotating sink for the configured rotation type and
// attaches it to the rtc logging stream. No-op if already started; silently
// aborts (after logging) if the sink fails to open its files.
- (void)start {
  if (_hasStarted) {
    return;
  }
  switch (_rotationType) {
    case RTCFileLoggerTypeApp:
      // App-lifetime rotation: prefix-named files, 10% of budget per file.
      _logSink.reset(
          new rtc::FileRotatingLogSink(_dirPath.UTF8String,
                                       kRTCFileLoggerRotatingLogPrefix,
                                       _maxFileSize,
                                       _maxFileSize / 10));
      break;
    case RTCFileLoggerTypeCall:
      _logSink.reset(
          new rtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String,
                                                  _maxFileSize));
      break;
  }
  if (!_logSink->Init()) {
    LOG(LS_ERROR) << "Failed to open log files at path: "
                  << _dirPath.UTF8String;
    _logSink.reset();
    return;
  }
  if (_shouldDisableBuffering) {
    _logSink->DisableBuffering();
  }
  rtc::LogMessage::LogThreads(true);
  rtc::LogMessage::LogTimestamps(true);
  rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
  _hasStarted = YES;
}

// Detaches and destroys the sink. No-op if not started.
- (void)stop {
  if (!_hasStarted) {
    return;
  }
  RTC_DCHECK(_logSink);
  rtc::LogMessage::RemoveLogToStream(_logSink.get());
  _hasStarted = NO;
  _logSink.reset();
}

// Returns the accumulated log contents. Must be called while stopped
// (returns nil when started); returns empty data when there are no logs or
// they cannot be opened.
- (NSData *)logData {
  if (_hasStarted) {
    return nil;
  }
  NSMutableData* logData = [NSMutableData data];
  std::unique_ptr<rtc::FileRotatingStream> stream;
  switch(_rotationType) {
    case RTCFileLoggerTypeApp:
      stream.reset(
          new rtc::FileRotatingStream(_dirPath.UTF8String,
                                      kRTCFileLoggerRotatingLogPrefix));
      break;
    case RTCFileLoggerTypeCall:
      stream.reset(new rtc::CallSessionFileRotatingStream(_dirPath.UTF8String));
      break;
  }
  if (!stream->Open()) {
    return logData;
  }
  size_t bufferSize = 0;
  if (!stream->GetSize(&bufferSize) || bufferSize == 0) {
    return logData;
  }
  size_t read = 0;
  // Allocate memory using malloc so we can pass it directly to NSData without
  // copying; initWithBytesNoCopy: takes ownership and frees the buffer.
  std::unique_ptr<uint8_t[]> buffer(static_cast<uint8_t*>(malloc(bufferSize)));
  stream->ReadAll(buffer.get(), bufferSize, &read, nullptr);
  logData = [[NSMutableData alloc] initWithBytesNoCopy:buffer.release()
                                                length:read];
  return logData;
}

#pragma mark - Private

// Maps the ObjC severity enum to the native rtc logging severity.
- (rtc::LoggingSeverity)rtcSeverity {
  switch (_severity) {
    case RTCFileLoggerSeverityVerbose:
      return rtc::LS_VERBOSE;
    case RTCFileLoggerSeverityInfo:
      return rtc::LS_INFO;
    case RTCFileLoggerSeverityWarning:
      return rtc::LS_WARNING;
    case RTCFileLoggerSeverityError:
      return rtc::LS_ERROR;
  }
}

@end
|
||||
47
sdk/objc/Framework/Classes/Common/RTCLogging.mm
Normal file
47
sdk/objc/Framework/Classes/Common/RTCLogging.mm
Normal file
@ -0,0 +1,47 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
#include "webrtc/rtc_base/logging.h"
|
||||
|
||||
rtc::LoggingSeverity RTCGetNativeLoggingSeverity(RTCLoggingSeverity severity) {
|
||||
switch (severity) {
|
||||
case RTCLoggingSeverityVerbose:
|
||||
return rtc::LS_VERBOSE;
|
||||
case RTCLoggingSeverityInfo:
|
||||
return rtc::LS_INFO;
|
||||
case RTCLoggingSeverityWarning:
|
||||
return rtc::LS_WARNING;
|
||||
case RTCLoggingSeverityError:
|
||||
return rtc::LS_ERROR;
|
||||
}
|
||||
}
|
||||
|
||||
void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string) {
|
||||
if (log_string.length) {
|
||||
const char* utf8_string = log_string.UTF8String;
|
||||
LOG_V(RTCGetNativeLoggingSeverity(severity)) << utf8_string;
|
||||
}
|
||||
}
|
||||
|
||||
void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity) {
|
||||
rtc::LogMessage::LogToDebug(RTCGetNativeLoggingSeverity(severity));
|
||||
}
|
||||
|
||||
NSString* RTCFileName(const char* file_path) {
|
||||
NSString* ns_file_path =
|
||||
[[NSString alloc] initWithBytesNoCopy:const_cast<char*>(file_path)
|
||||
length:strlen(file_path)
|
||||
encoding:NSUTF8StringEncoding
|
||||
freeWhenDone:NO];
|
||||
return ns_file_path.lastPathComponent;
|
||||
}
|
||||
|
||||
@ -0,0 +1,24 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#if defined(WEBRTC_IOS)
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
@interface RTCUIApplicationStatusObserver : NSObject
|
||||
|
||||
+ (instancetype)sharedInstance;
|
||||
+ (void)prepareForUse;
|
||||
|
||||
- (BOOL)isApplicationActive;
|
||||
|
||||
@end
|
||||
|
||||
#endif // WEBRTC_IOS
|
||||
@ -0,0 +1,114 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "RTCUIApplicationStatusObserver.h"
|
||||
|
||||
#if defined(WEBRTC_IOS)
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
@interface RTCUIApplicationStatusObserver ()
|
||||
|
||||
@property(nonatomic, assign) BOOL initialized;
|
||||
@property(nonatomic, assign) UIApplicationState state;
|
||||
|
||||
@end
|
||||
|
||||
@implementation RTCUIApplicationStatusObserver {
|
||||
BOOL _initialized;
|
||||
dispatch_block_t _initializeBlock;
|
||||
dispatch_semaphore_t _waitForInitializeSemaphore;
|
||||
UIApplicationState _state;
|
||||
|
||||
id<NSObject> _activeObserver;
|
||||
id<NSObject> _backgroundObserver;
|
||||
}
|
||||
|
||||
@synthesize initialized = _initialized;
|
||||
@synthesize state = _state;
|
||||
|
||||
+ (instancetype)sharedInstance {
|
||||
static id sharedInstance;
|
||||
static dispatch_once_t onceToken;
|
||||
dispatch_once(&onceToken, ^{
|
||||
sharedInstance = [[self alloc] init];
|
||||
});
|
||||
|
||||
return sharedInstance;
|
||||
}
|
||||
|
||||
// Method to make sure observers are added and the initialization block is
|
||||
// scheduled to run on the main queue.
|
||||
+ (void)prepareForUse {
|
||||
__unused RTCUIApplicationStatusObserver *observer = [self sharedInstance];
|
||||
}
|
||||
|
||||
- (id)init {
|
||||
if (self = [super init]) {
|
||||
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
|
||||
__weak RTCUIApplicationStatusObserver *weakSelf = self;
|
||||
_activeObserver = [center addObserverForName:UIApplicationDidBecomeActiveNotification
|
||||
object:nil
|
||||
queue:[NSOperationQueue mainQueue]
|
||||
usingBlock:^(NSNotification *note) {
|
||||
weakSelf.state =
|
||||
[UIApplication sharedApplication].applicationState;
|
||||
}];
|
||||
|
||||
_backgroundObserver = [center addObserverForName:UIApplicationDidEnterBackgroundNotification
|
||||
object:nil
|
||||
queue:[NSOperationQueue mainQueue]
|
||||
usingBlock:^(NSNotification *note) {
|
||||
weakSelf.state =
|
||||
[UIApplication sharedApplication].applicationState;
|
||||
}];
|
||||
|
||||
_waitForInitializeSemaphore = dispatch_semaphore_create(1);
|
||||
_initialized = NO;
|
||||
_initializeBlock = dispatch_block_create(DISPATCH_BLOCK_INHERIT_QOS_CLASS, ^{
|
||||
weakSelf.state = [UIApplication sharedApplication].applicationState;
|
||||
weakSelf.initialized = YES;
|
||||
});
|
||||
|
||||
dispatch_async(dispatch_get_main_queue(), _initializeBlock);
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
|
||||
[center removeObserver:_activeObserver];
|
||||
[center removeObserver:_backgroundObserver];
|
||||
}
|
||||
|
||||
- (BOOL)isApplicationActive {
|
||||
// NOTE: The function `dispatch_block_wait` can only legally be called once.
|
||||
// Because of this, if several threads call the `isApplicationActive` method before
|
||||
// the `_initializeBlock` has been executed, instead of multiple threads calling
|
||||
// `dispatch_block_wait`, the other threads need to wait for the first waiting thread
|
||||
// instead.
|
||||
if (!_initialized) {
|
||||
dispatch_semaphore_wait(_waitForInitializeSemaphore, DISPATCH_TIME_FOREVER);
|
||||
if (!_initialized) {
|
||||
long ret = dispatch_block_wait(_initializeBlock,
|
||||
dispatch_time(DISPATCH_TIME_NOW, 10.0 * NSEC_PER_SEC));
|
||||
RTC_DCHECK_EQ(ret, 0);
|
||||
}
|
||||
dispatch_semaphore_signal(_waitForInitializeSemaphore);
|
||||
}
|
||||
return _state == UIApplicationStateActive;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
#endif // WEBRTC_IOS
|
||||
176
sdk/objc/Framework/Classes/Common/UIDevice+RTCDevice.mm
Normal file
176
sdk/objc/Framework/Classes/Common/UIDevice+RTCDevice.mm
Normal file
@ -0,0 +1,176 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/UIDevice+RTCDevice.h"
|
||||
|
||||
#include <memory>
|
||||
#include <sys/sysctl.h>
|
||||
|
||||
@implementation UIDevice (RTCDevice)
|
||||
|
||||
+ (RTCDeviceType)deviceType {
|
||||
NSDictionary *machineNameToType = @{
|
||||
@"iPhone1,1": @(RTCDeviceTypeIPhone1G),
|
||||
@"iPhone1,2": @(RTCDeviceTypeIPhone3G),
|
||||
@"iPhone2,1": @(RTCDeviceTypeIPhone3GS),
|
||||
@"iPhone3,1": @(RTCDeviceTypeIPhone4),
|
||||
@"iPhone3,3": @(RTCDeviceTypeIPhone4Verizon),
|
||||
@"iPhone4,1": @(RTCDeviceTypeIPhone4S),
|
||||
@"iPhone5,1": @(RTCDeviceTypeIPhone5GSM),
|
||||
@"iPhone5,2": @(RTCDeviceTypeIPhone5GSM_CDMA),
|
||||
@"iPhone5,3": @(RTCDeviceTypeIPhone5CGSM),
|
||||
@"iPhone5,4": @(RTCDeviceTypeIPhone5CGSM_CDMA),
|
||||
@"iPhone6,1": @(RTCDeviceTypeIPhone5SGSM),
|
||||
@"iPhone6,2": @(RTCDeviceTypeIPhone5SGSM_CDMA),
|
||||
@"iPhone7,1": @(RTCDeviceTypeIPhone6Plus),
|
||||
@"iPhone7,2": @(RTCDeviceTypeIPhone6),
|
||||
@"iPhone8,1": @(RTCDeviceTypeIPhone6S),
|
||||
@"iPhone8,2": @(RTCDeviceTypeIPhone6SPlus),
|
||||
@"iPod1,1": @(RTCDeviceTypeIPodTouch1G),
|
||||
@"iPod2,1": @(RTCDeviceTypeIPodTouch2G),
|
||||
@"iPod3,1": @(RTCDeviceTypeIPodTouch3G),
|
||||
@"iPod4,1": @(RTCDeviceTypeIPodTouch4G),
|
||||
@"iPod5,1": @(RTCDeviceTypeIPodTouch5G),
|
||||
@"iPad1,1": @(RTCDeviceTypeIPad),
|
||||
@"iPad2,1": @(RTCDeviceTypeIPad2Wifi),
|
||||
@"iPad2,2": @(RTCDeviceTypeIPad2GSM),
|
||||
@"iPad2,3": @(RTCDeviceTypeIPad2CDMA),
|
||||
@"iPad2,4": @(RTCDeviceTypeIPad2Wifi2),
|
||||
@"iPad2,5": @(RTCDeviceTypeIPadMiniWifi),
|
||||
@"iPad2,6": @(RTCDeviceTypeIPadMiniGSM),
|
||||
@"iPad2,7": @(RTCDeviceTypeIPadMiniGSM_CDMA),
|
||||
@"iPad3,1": @(RTCDeviceTypeIPad3Wifi),
|
||||
@"iPad3,2": @(RTCDeviceTypeIPad3GSM_CDMA),
|
||||
@"iPad3,3": @(RTCDeviceTypeIPad3GSM),
|
||||
@"iPad3,4": @(RTCDeviceTypeIPad4Wifi),
|
||||
@"iPad3,5": @(RTCDeviceTypeIPad4GSM),
|
||||
@"iPad3,6": @(RTCDeviceTypeIPad4GSM_CDMA),
|
||||
@"iPad4,1": @(RTCDeviceTypeIPadAirWifi),
|
||||
@"iPad4,2": @(RTCDeviceTypeIPadAirCellular),
|
||||
@"iPad4,4": @(RTCDeviceTypeIPadMini2GWifi),
|
||||
@"iPad4,5": @(RTCDeviceTypeIPadMini2GCellular),
|
||||
@"i386": @(RTCDeviceTypeSimulatori386),
|
||||
@"x86_64": @(RTCDeviceTypeSimulatorx86_64),
|
||||
};
|
||||
|
||||
size_t size = 0;
|
||||
sysctlbyname("hw.machine", NULL, &size, NULL, 0);
|
||||
std::unique_ptr<char[]> machine;
|
||||
machine.reset(new char[size]);
|
||||
sysctlbyname("hw.machine", machine.get(), &size, NULL, 0);
|
||||
NSString *machineName = [[NSString alloc] initWithCString:machine.get()
|
||||
encoding:NSUTF8StringEncoding];
|
||||
RTCDeviceType deviceType = RTCDeviceTypeUnknown;
|
||||
NSNumber *typeNumber = machineNameToType[machineName];
|
||||
if (typeNumber) {
|
||||
deviceType = static_cast<RTCDeviceType>(typeNumber.integerValue);
|
||||
}
|
||||
return deviceType;
|
||||
}
|
||||
|
||||
+ (NSString *)stringForDeviceType:(RTCDeviceType)deviceType {
|
||||
switch (deviceType) {
|
||||
case RTCDeviceTypeUnknown:
|
||||
return @"Unknown";
|
||||
case RTCDeviceTypeIPhone1G:
|
||||
return @"iPhone 1G";
|
||||
case RTCDeviceTypeIPhone3G:
|
||||
return @"iPhone 3G";
|
||||
case RTCDeviceTypeIPhone3GS:
|
||||
return @"iPhone 3GS";
|
||||
case RTCDeviceTypeIPhone4:
|
||||
return @"iPhone 4";
|
||||
case RTCDeviceTypeIPhone4Verizon:
|
||||
return @"iPhone 4 Verizon";
|
||||
case RTCDeviceTypeIPhone4S:
|
||||
return @"iPhone 4S";
|
||||
case RTCDeviceTypeIPhone5GSM:
|
||||
return @"iPhone 5 (GSM)";
|
||||
case RTCDeviceTypeIPhone5GSM_CDMA:
|
||||
return @"iPhone 5 (GSM+CDMA)";
|
||||
case RTCDeviceTypeIPhone5CGSM:
|
||||
return @"iPhone 5C (GSM)";
|
||||
case RTCDeviceTypeIPhone5CGSM_CDMA:
|
||||
return @"iPhone 5C (GSM+CDMA)";
|
||||
case RTCDeviceTypeIPhone5SGSM:
|
||||
return @"iPhone 5S (GSM)";
|
||||
case RTCDeviceTypeIPhone5SGSM_CDMA:
|
||||
return @"iPhone 5S (GSM+CDMA)";
|
||||
case RTCDeviceTypeIPhone6Plus:
|
||||
return @"iPhone 6 Plus";
|
||||
case RTCDeviceTypeIPhone6:
|
||||
return @"iPhone 6";
|
||||
case RTCDeviceTypeIPhone6S:
|
||||
return @"iPhone 6S";
|
||||
case RTCDeviceTypeIPhone6SPlus:
|
||||
return @"iPhone 6S Plus";
|
||||
case RTCDeviceTypeIPodTouch1G:
|
||||
return @"iPod Touch 1G";
|
||||
case RTCDeviceTypeIPodTouch2G:
|
||||
return @"iPod Touch 2G";
|
||||
case RTCDeviceTypeIPodTouch3G:
|
||||
return @"iPod Touch 3G";
|
||||
case RTCDeviceTypeIPodTouch4G:
|
||||
return @"iPod Touch 4G";
|
||||
case RTCDeviceTypeIPodTouch5G:
|
||||
return @"iPod Touch 5G";
|
||||
case RTCDeviceTypeIPad:
|
||||
return @"iPad";
|
||||
case RTCDeviceTypeIPad2Wifi:
|
||||
return @"iPad 2 (WiFi)";
|
||||
case RTCDeviceTypeIPad2GSM:
|
||||
return @"iPad 2 (GSM)";
|
||||
case RTCDeviceTypeIPad2CDMA:
|
||||
return @"iPad 2 (CDMA)";
|
||||
case RTCDeviceTypeIPad2Wifi2:
|
||||
return @"iPad 2 (WiFi) 2";
|
||||
case RTCDeviceTypeIPadMiniWifi:
|
||||
return @"iPad Mini (WiFi)";
|
||||
case RTCDeviceTypeIPadMiniGSM:
|
||||
return @"iPad Mini (GSM)";
|
||||
case RTCDeviceTypeIPadMiniGSM_CDMA:
|
||||
return @"iPad Mini (GSM+CDMA)";
|
||||
case RTCDeviceTypeIPad3Wifi:
|
||||
return @"iPad 3 (WiFi)";
|
||||
case RTCDeviceTypeIPad3GSM_CDMA:
|
||||
return @"iPad 3 (GSM+CDMA)";
|
||||
case RTCDeviceTypeIPad3GSM:
|
||||
return @"iPad 3 (GSM)";
|
||||
case RTCDeviceTypeIPad4Wifi:
|
||||
return @"iPad 4 (WiFi)";
|
||||
case RTCDeviceTypeIPad4GSM:
|
||||
return @"iPad 4 (GSM)";
|
||||
case RTCDeviceTypeIPad4GSM_CDMA:
|
||||
return @"iPad 4 (GSM+CDMA)";
|
||||
case RTCDeviceTypeIPadAirWifi:
|
||||
return @"iPad Air (WiFi)";
|
||||
case RTCDeviceTypeIPadAirCellular:
|
||||
return @"iPad Air (Cellular)";
|
||||
case RTCDeviceTypeIPadMini2GWifi:
|
||||
return @"iPad Mini 2G (Wifi)";
|
||||
case RTCDeviceTypeIPadMini2GCellular:
|
||||
return @"iPad Mini 2G (Cellular)";
|
||||
case RTCDeviceTypeSimulatori386:
|
||||
return @"i386 Simulator";
|
||||
case RTCDeviceTypeSimulatorx86_64:
|
||||
return @"x86_64 Simulator";
|
||||
}
|
||||
return @"Unknown";
|
||||
}
|
||||
|
||||
+ (double)currentDeviceSystemVersion {
|
||||
return [self currentDevice].systemVersion.doubleValue;
|
||||
}
|
||||
|
||||
+ (BOOL)isIOS9OrLater {
|
||||
return [self currentDeviceSystemVersion] >= 9.0;
|
||||
}
|
||||
|
||||
@end
|
||||
76
sdk/objc/Framework/Classes/Common/helpers.h
Normal file
76
sdk/objc/Framework/Classes/Common/helpers.h
Normal file
@ -0,0 +1,76 @@
|
||||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_COMMON_HELPERS_H_
|
||||
#define WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_COMMON_HELPERS_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
namespace webrtc {
|
||||
namespace ios {
|
||||
|
||||
bool CheckAndLogError(BOOL success, NSError* error);
|
||||
|
||||
NSString *NSStringFromStdString(const std::string& stdString);
|
||||
std::string StdStringFromNSString(NSString* nsString);
|
||||
|
||||
// Return thread ID as a string.
|
||||
std::string GetThreadId();
|
||||
|
||||
// Return thread ID as string suitable for debug logging.
|
||||
std::string GetThreadInfo();
|
||||
|
||||
// Returns [NSThread currentThread] description as string.
|
||||
// Example: <NSThread: 0x170066d80>{number = 1, name = main}
|
||||
std::string GetCurrentThreadDescription();
|
||||
|
||||
#if defined(WEBRTC_IOS)
|
||||
// Returns the current name of the operating system.
|
||||
std::string GetSystemName();
|
||||
|
||||
// Returns the current version of the operating system as a string.
|
||||
std::string GetSystemVersionAsString();
|
||||
|
||||
// Returns the version of the operating system in double representation.
|
||||
// Uses a cached value of the system version.
|
||||
double GetSystemVersion();
|
||||
|
||||
// Returns the device type.
|
||||
// Examples: ”iPhone” and ”iPod touch”.
|
||||
std::string GetDeviceType();
|
||||
#endif // defined(WEBRTC_IOS)
|
||||
|
||||
// Returns a more detailed device name.
|
||||
// Examples: "iPhone 5s (GSM)" and "iPhone 6 Plus".
|
||||
std::string GetDeviceName();
|
||||
|
||||
// Returns the name of the process. Does not uniquely identify the process.
|
||||
std::string GetProcessName();
|
||||
|
||||
// Returns the identifier of the process (often called process ID).
|
||||
int GetProcessID();
|
||||
|
||||
// Returns a string containing the version of the operating system on which the
|
||||
// process is executing. The string is string is human readable, localized, and
|
||||
// is appropriate for displaying to the user.
|
||||
std::string GetOSVersionString();
|
||||
|
||||
// Returns the number of processing cores available on the device.
|
||||
int GetProcessorCount();
|
||||
|
||||
#if defined(WEBRTC_IOS)
|
||||
// Indicates whether Low Power Mode is enabled on the iOS device.
|
||||
bool GetLowPowerModeEnabled();
|
||||
#endif
|
||||
|
||||
} // namespace ios
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_COMMON_HELPERS_H_
|
||||
135
sdk/objc/Framework/Classes/Common/helpers.mm
Normal file
135
sdk/objc/Framework/Classes/Common/helpers.mm
Normal file
@ -0,0 +1,135 @@
|
||||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import <sys/sysctl.h>
|
||||
#if defined(WEBRTC_IOS)
|
||||
#import <UIKit/UIKit.h>
|
||||
#endif
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
#include "webrtc/rtc_base/logging.h"
|
||||
#include "webrtc/sdk/objc/Framework/Classes/Common/helpers.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace ios {
|
||||
|
||||
#if defined(WEBRTC_IOS)
|
||||
bool isOperatingSystemAtLeastVersion(double version) {
|
||||
return GetSystemVersion() >= version;
|
||||
}
|
||||
#endif
|
||||
|
||||
NSString* NSStringFromStdString(const std::string& stdString) {
|
||||
// std::string may contain null termination character so we construct
|
||||
// using length.
|
||||
return [[NSString alloc] initWithBytes:stdString.data()
|
||||
length:stdString.length()
|
||||
encoding:NSUTF8StringEncoding];
|
||||
}
|
||||
|
||||
std::string StdStringFromNSString(NSString* nsString) {
|
||||
NSData* charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
|
||||
return std::string(reinterpret_cast<const char*>([charData bytes]),
|
||||
[charData length]);
|
||||
}
|
||||
|
||||
bool CheckAndLogError(BOOL success, NSError* error) {
|
||||
if (!success) {
|
||||
NSString* msg =
|
||||
[NSString stringWithFormat:@"Error: %ld, %@, %@", (long)error.code,
|
||||
error.localizedDescription,
|
||||
error.localizedFailureReason];
|
||||
LOG(LS_ERROR) << StdStringFromNSString(msg);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// TODO(henrika): see if it is possible to move to GetThreadName in
|
||||
// platform_thread.h and base it on pthread methods instead.
|
||||
std::string GetCurrentThreadDescription() {
|
||||
NSString* name = [NSString stringWithFormat:@"%@", [NSThread currentThread]];
|
||||
return StdStringFromNSString(name);
|
||||
}
|
||||
|
||||
#if defined(WEBRTC_IOS)
|
||||
std::string GetSystemName() {
|
||||
NSString* osName = [[UIDevice currentDevice] systemName];
|
||||
return StdStringFromNSString(osName);
|
||||
}
|
||||
|
||||
std::string GetSystemVersionAsString() {
|
||||
NSString* osVersion = [[UIDevice currentDevice] systemVersion];
|
||||
return StdStringFromNSString(osVersion);
|
||||
}
|
||||
|
||||
double GetSystemVersion() {
|
||||
static dispatch_once_t once_token;
|
||||
static double system_version;
|
||||
dispatch_once(&once_token, ^{
|
||||
system_version = [UIDevice currentDevice].systemVersion.doubleValue;
|
||||
});
|
||||
return system_version;
|
||||
}
|
||||
|
||||
std::string GetDeviceType() {
|
||||
NSString* deviceModel = [[UIDevice currentDevice] model];
|
||||
return StdStringFromNSString(deviceModel);
|
||||
}
|
||||
#endif
|
||||
|
||||
std::string GetDeviceName() {
|
||||
size_t size;
|
||||
sysctlbyname("hw.machine", NULL, &size, NULL, 0);
|
||||
std::unique_ptr<char[]> machine;
|
||||
machine.reset(new char[size]);
|
||||
sysctlbyname("hw.machine", machine.get(), &size, NULL, 0);
|
||||
return std::string(machine.get());
|
||||
}
|
||||
|
||||
std::string GetProcessName() {
|
||||
NSString* processName = [NSProcessInfo processInfo].processName;
|
||||
return StdStringFromNSString(processName);
|
||||
}
|
||||
|
||||
int GetProcessID() {
|
||||
return [NSProcessInfo processInfo].processIdentifier;
|
||||
}
|
||||
|
||||
std::string GetOSVersionString() {
|
||||
NSString* osVersion =
|
||||
[NSProcessInfo processInfo].operatingSystemVersionString;
|
||||
return StdStringFromNSString(osVersion);
|
||||
}
|
||||
|
||||
int GetProcessorCount() {
|
||||
return [NSProcessInfo processInfo].processorCount;
|
||||
}
|
||||
|
||||
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \
|
||||
&& __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
|
||||
bool GetLowPowerModeEnabled() {
|
||||
if (isOperatingSystemAtLeastVersion(9.0)) {
|
||||
// lowPoweredModeEnabled is only available on iOS9+.
|
||||
return [NSProcessInfo processInfo].lowPowerModeEnabled;
|
||||
}
|
||||
LOG(LS_WARNING) << "webrtc::ios::GetLowPowerModeEnabled() is not "
|
||||
"supported. Requires at least iOS 9.0";
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
|
||||
} // namespace ios
|
||||
} // namespace webrtc
|
||||
|
||||
17
sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.h
Normal file
17
sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.h
Normal file
@ -0,0 +1,17 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
#import "RTCMTLRenderer.h"
|
||||
|
||||
NS_AVAILABLE(10_11, 9_0)
|
||||
@interface RTCMTLI420Renderer : RTCMTLRenderer
|
||||
@end
|
||||
152
sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
Normal file
152
sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
Normal file
@ -0,0 +1,152 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCMTLI420Renderer.h"
|
||||
#import "WebRTC/RTCVideoFrameBuffer.h"
|
||||
|
||||
#import <Metal/Metal.h>
|
||||
#import <MetalKit/MetalKit.h>
|
||||
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
#import "RTCMTLRenderer+Private.h"
|
||||
|
||||
#define MTL_STRINGIFY(s) @ #s
|
||||
|
||||
static NSString *const shaderSource = MTL_STRINGIFY(
|
||||
using namespace metal; typedef struct {
|
||||
packed_float2 position;
|
||||
packed_float2 texcoord;
|
||||
} Vertex;
|
||||
|
||||
typedef struct {
|
||||
float4 position[[position]];
|
||||
float2 texcoord;
|
||||
} Varyings;
|
||||
|
||||
vertex Varyings vertexPassthrough(device Vertex * verticies[[buffer(0)]],
|
||||
unsigned int vid[[vertex_id]]) {
|
||||
Varyings out;
|
||||
device Vertex &v = verticies[vid];
|
||||
out.position = float4(float2(v.position), 0.0, 1.0);
|
||||
out.texcoord = v.texcoord;
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
fragment half4 fragmentColorConversion(
|
||||
Varyings in[[stage_in]], texture2d<float, access::sample> textureY[[texture(0)]],
|
||||
texture2d<float, access::sample> textureU[[texture(1)]],
|
||||
texture2d<float, access::sample> textureV[[texture(2)]]) {
|
||||
constexpr sampler s(address::clamp_to_edge, filter::linear);
|
||||
float y;
|
||||
float u;
|
||||
float v;
|
||||
float r;
|
||||
float g;
|
||||
float b;
|
||||
// Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
|
||||
y = textureY.sample(s, in.texcoord).r;
|
||||
u = textureU.sample(s, in.texcoord).r;
|
||||
v = textureV.sample(s, in.texcoord).r;
|
||||
u = u - 0.5;
|
||||
v = v - 0.5;
|
||||
r = y + 1.403 * v;
|
||||
g = y - 0.344 * u - 0.714 * v;
|
||||
b = y + 1.770 * u;
|
||||
|
||||
float4 out = float4(r, g, b, 1.0);
|
||||
|
||||
return half4(out);
|
||||
});
|
||||
|
||||
@implementation RTCMTLI420Renderer {
|
||||
// Textures.
|
||||
id<MTLTexture> _yTexture;
|
||||
id<MTLTexture> _uTexture;
|
||||
id<MTLTexture> _vTexture;
|
||||
|
||||
MTLTextureDescriptor *_descriptor;
|
||||
MTLTextureDescriptor *_chromaDescriptor;
|
||||
|
||||
int _width;
|
||||
int _height;
|
||||
int _chromaWidth;
|
||||
int _chromaHeight;
|
||||
}
|
||||
|
||||
#pragma mark - Virtual
|
||||
|
||||
- (NSString *)shaderSource {
|
||||
return shaderSource;
|
||||
}
|
||||
|
||||
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
|
||||
[super setupTexturesForFrame:frame];
|
||||
|
||||
id<MTLDevice> device = [self currentMetalDevice];
|
||||
if (!device) {
|
||||
return NO;
|
||||
}
|
||||
|
||||
id<RTCI420Buffer> buffer = [frame.buffer toI420];
|
||||
|
||||
// Luma (y) texture.
|
||||
if (!_descriptor || (_width != frame.width && _height != frame.height)) {
|
||||
_width = frame.width;
|
||||
_height = frame.height;
|
||||
_descriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
|
||||
width:_width
|
||||
height:_height
|
||||
mipmapped:NO];
|
||||
_descriptor.usage = MTLTextureUsageShaderRead;
|
||||
_yTexture = [device newTextureWithDescriptor:_descriptor];
|
||||
}
|
||||
|
||||
// Chroma (u,v) textures
|
||||
[_yTexture replaceRegion:MTLRegionMake2D(0, 0, _width, _height)
|
||||
mipmapLevel:0
|
||||
withBytes:buffer.dataY
|
||||
bytesPerRow:buffer.strideY];
|
||||
|
||||
if (!_chromaDescriptor ||
|
||||
(_chromaWidth != frame.width / 2 && _chromaHeight != frame.height / 2)) {
|
||||
_chromaWidth = frame.width / 2;
|
||||
_chromaHeight = frame.height / 2;
|
||||
_chromaDescriptor =
|
||||
[MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
|
||||
width:_chromaWidth
|
||||
height:_chromaHeight
|
||||
mipmapped:NO];
|
||||
_chromaDescriptor.usage = MTLTextureUsageShaderRead;
|
||||
_uTexture = [device newTextureWithDescriptor:_chromaDescriptor];
|
||||
_vTexture = [device newTextureWithDescriptor:_chromaDescriptor];
|
||||
}
|
||||
|
||||
[_uTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
|
||||
mipmapLevel:0
|
||||
withBytes:buffer.dataU
|
||||
bytesPerRow:buffer.strideU];
|
||||
[_vTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
|
||||
mipmapLevel:0
|
||||
withBytes:buffer.dataV
|
||||
bytesPerRow:buffer.strideV];
|
||||
|
||||
return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil);
|
||||
}
|
||||
|
||||
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
|
||||
[renderEncoder setFragmentTexture:_yTexture atIndex:0];
|
||||
[renderEncoder setFragmentTexture:_uTexture atIndex:1];
|
||||
[renderEncoder setFragmentTexture:_vTexture atIndex:2];
|
||||
}
|
||||
|
||||
@end
|
||||
118
sdk/objc/Framework/Classes/Metal/RTCMTLNSVideoView.m
Normal file
118
sdk/objc/Framework/Classes/Metal/RTCMTLNSVideoView.m
Normal file
@ -0,0 +1,118 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCMTLNSVideoView.h"
|
||||
|
||||
#import <Metal/Metal.h>
|
||||
#import <MetalKit/MetalKit.h>
|
||||
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
#import "RTCMTLI420Renderer.h"
|
||||
|
||||
@interface RTCMTLNSVideoView ()<MTKViewDelegate>
|
||||
@property(nonatomic) id<RTCMTLRenderer> renderer;
|
||||
@property(nonatomic, strong) MTKView *metalView;
|
||||
@property(atomic, strong) RTCVideoFrame *videoFrame;
|
||||
@end
|
||||
|
||||
@implementation RTCMTLNSVideoView {
|
||||
id<RTCMTLRenderer> _renderer;
|
||||
}
|
||||
|
||||
@synthesize renderer = _renderer;
|
||||
@synthesize metalView = _metalView;
|
||||
@synthesize videoFrame = _videoFrame;
|
||||
|
||||
- (instancetype)initWithFrame:(CGRect)frameRect {
|
||||
self = [super initWithFrame:frameRect];
|
||||
if (self) {
|
||||
[self configure];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithCoder:(NSCoder *)aCoder {
|
||||
self = [super initWithCoder:aCoder];
|
||||
if (self) {
|
||||
[self configure];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
+ (BOOL)isMetalAvailable {
|
||||
return [MTLCopyAllDevices() count] > 0;
|
||||
}
|
||||
|
||||
- (void)configure {
|
||||
if ([[self class] isMetalAvailable]) {
|
||||
_metalView = [[MTKView alloc] initWithFrame:self.bounds];
|
||||
[self addSubview:_metalView];
|
||||
_metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit;
|
||||
_metalView.translatesAutoresizingMaskIntoConstraints = NO;
|
||||
_metalView.framebufferOnly = YES;
|
||||
_metalView.delegate = self;
|
||||
|
||||
_renderer = [[RTCMTLI420Renderer alloc] init];
|
||||
if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) {
|
||||
_renderer = nil;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
- (void)updateConstraints {
|
||||
NSDictionary *views = NSDictionaryOfVariableBindings(_metalView);
|
||||
|
||||
NSArray *constraintsHorizontal =
|
||||
[NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|"
|
||||
options:0
|
||||
metrics:nil
|
||||
views:views];
|
||||
[self addConstraints:constraintsHorizontal];
|
||||
|
||||
NSArray *constraintsVertical =
|
||||
[NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|"
|
||||
options:0
|
||||
metrics:nil
|
||||
views:views];
|
||||
[self addConstraints:constraintsVertical];
|
||||
[super updateConstraints];
|
||||
}
|
||||
|
||||
#pragma mark - MTKViewDelegate methods
|
||||
- (void)drawInMTKView:(nonnull MTKView *)view {
|
||||
if (self.videoFrame == nil) {
|
||||
return;
|
||||
}
|
||||
if (view == self.metalView) {
|
||||
[_renderer drawFrame:self.videoFrame];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
|
||||
}
|
||||
|
||||
#pragma mark - RTCVideoRenderer
|
||||
|
||||
- (void)setSize:(CGSize)size {
|
||||
_metalView.drawableSize = size;
|
||||
[_metalView draw];
|
||||
}
|
||||
|
||||
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
|
||||
if (frame == nil) {
|
||||
return;
|
||||
}
|
||||
self.videoFrame = [frame newI420VideoFrame];
|
||||
}
|
||||
|
||||
@end
|
||||
18
sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.h
Normal file
18
sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.h
Normal file
@ -0,0 +1,18 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
#import "RTCMTLRenderer.h"
|
||||
|
||||
NS_AVAILABLE(10_11, 9_0)
|
||||
@interface RTCMTLNV12Renderer : RTCMTLRenderer
|
||||
|
||||
@end
|
||||
135
sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
Normal file
135
sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
Normal file
@ -0,0 +1,135 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCMTLNV12Renderer.h"
|
||||
|
||||
#import <Metal/Metal.h>
|
||||
#import <MetalKit/MetalKit.h>
|
||||
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
#import "WebRTC/RTCVideoFrameBuffer.h"
|
||||
|
||||
#import "RTCMTLRenderer+Private.h"
|
||||
|
||||
#define MTL_STRINGIFY(s) @ #s
|
||||
|
||||
static NSString *const shaderSource = MTL_STRINGIFY(
|
||||
using namespace metal; typedef struct {
|
||||
packed_float2 position;
|
||||
packed_float2 texcoord;
|
||||
} Vertex;
|
||||
|
||||
typedef struct {
|
||||
float4 position[[position]];
|
||||
float2 texcoord;
|
||||
} Varyings;
|
||||
|
||||
vertex Varyings vertexPassthrough(device Vertex * verticies[[buffer(0)]],
|
||||
unsigned int vid[[vertex_id]]) {
|
||||
Varyings out;
|
||||
device Vertex &v = verticies[vid];
|
||||
out.position = float4(float2(v.position), 0.0, 1.0);
|
||||
out.texcoord = v.texcoord;
|
||||
return out;
|
||||
}
|
||||
|
||||
// Receiving YCrCb textures.
|
||||
fragment half4 fragmentColorConversion(
|
||||
Varyings in[[stage_in]], texture2d<float, access::sample> textureY[[texture(0)]],
|
||||
texture2d<float, access::sample> textureCbCr[[texture(1)]]) {
|
||||
constexpr sampler s(address::clamp_to_edge, filter::linear);
|
||||
float y;
|
||||
float2 uv;
|
||||
y = textureY.sample(s, in.texcoord).r;
|
||||
uv = textureCbCr.sample(s, in.texcoord).rg - float2(0.5, 0.5);
|
||||
|
||||
// Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
|
||||
float4 out = float4(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0);
|
||||
|
||||
return half4(out);
|
||||
});
|
||||
|
||||
@implementation RTCMTLNV12Renderer {
|
||||
// Textures.
|
||||
CVMetalTextureCacheRef _textureCache;
|
||||
id<MTLTexture> _yTexture;
|
||||
id<MTLTexture> _CrCbTexture;
|
||||
}
|
||||
|
||||
- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
|
||||
if ([super addRenderingDestination:view]) {
|
||||
[self initializeTextureCache];
|
||||
return YES;
|
||||
}
|
||||
return NO;
|
||||
}
|
||||
|
||||
- (void)initializeTextureCache {
|
||||
CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
|
||||
nil, &_textureCache);
|
||||
if (status != kCVReturnSuccess) {
|
||||
RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
|
||||
}
|
||||
}
|
||||
|
||||
- (NSString *)shaderSource {
|
||||
return shaderSource;
|
||||
}
|
||||
|
||||
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
|
||||
[super setupTexturesForFrame:frame];
|
||||
CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
|
||||
|
||||
id<MTLTexture> lumaTexture = nil;
|
||||
id<MTLTexture> chromaTexture = nil;
|
||||
CVMetalTextureRef outTexture = nullptr;
|
||||
|
||||
// Luma (y) texture.
|
||||
int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
|
||||
int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
|
||||
|
||||
int indexPlane = 0;
|
||||
CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
|
||||
kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, lumaWidth,
|
||||
lumaHeight, indexPlane, &outTexture);
|
||||
|
||||
if (result == kCVReturnSuccess) {
|
||||
lumaTexture = CVMetalTextureGetTexture(outTexture);
|
||||
}
|
||||
|
||||
// Same as CFRelease except it can be passed NULL without crashing.
|
||||
CVBufferRelease(outTexture);
|
||||
outTexture = nullptr;
|
||||
|
||||
// Chroma (CrCb) texture.
|
||||
indexPlane = 1;
|
||||
result = CVMetalTextureCacheCreateTextureFromImage(
|
||||
kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, lumaWidth / 2,
|
||||
lumaHeight / 2, indexPlane, &outTexture);
|
||||
if (result == kCVReturnSuccess) {
|
||||
chromaTexture = CVMetalTextureGetTexture(outTexture);
|
||||
}
|
||||
CVBufferRelease(outTexture);
|
||||
|
||||
if (lumaTexture != nil && chromaTexture != nil) {
|
||||
_yTexture = lumaTexture;
|
||||
_CrCbTexture = chromaTexture;
|
||||
return YES;
|
||||
}
|
||||
return NO;
|
||||
}
|
||||
|
||||
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
|
||||
[renderEncoder setFragmentTexture:_yTexture atIndex:0];
|
||||
[renderEncoder setFragmentTexture:_CrCbTexture atIndex:1];
|
||||
}
|
||||
|
||||
@end
|
||||
21
sdk/objc/Framework/Classes/Metal/RTCMTLRenderer+Private.h
Normal file
21
sdk/objc/Framework/Classes/Metal/RTCMTLRenderer+Private.h
Normal file
@ -0,0 +1,21 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Metal/Metal.h>
|
||||
#import "RTCMTLRenderer.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
@interface RTCMTLRenderer (Private)
|
||||
- (nullable id<MTLDevice>)currentMetalDevice;
|
||||
- (NSString *)shaderSource;
|
||||
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame;
|
||||
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder;
|
||||
@end
|
||||
NS_ASSUME_NONNULL_END
|
||||
55
sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
Normal file
55
sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
Normal file
@ -0,0 +1,55 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#if TARGET_OS_IPHONE
|
||||
#import <UIKit/UIKit.h>
|
||||
#else
|
||||
#import <AppKit/AppKit.h>
|
||||
#endif
|
||||
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
/**
|
||||
* Protocol defining ability to render RTCVideoFrame in Metal enabled views.
|
||||
*/
|
||||
@protocol RTCMTLRenderer<NSObject>
|
||||
|
||||
/**
|
||||
* Method to be implemented to perform actual rendering of the provided frame.
|
||||
*
|
||||
* @param frame The frame to be rendered.
|
||||
*/
|
||||
- (void)drawFrame:(RTCVideoFrame *)frame;
|
||||
|
||||
/**
|
||||
* Sets the provided view as rendering destination if possible.
|
||||
*
|
||||
* If not possible method returns NO and callers of the method are responisble for performing
|
||||
* cleanups.
|
||||
*/
|
||||
|
||||
#if TARGET_OS_IOS
|
||||
- (BOOL)addRenderingDestination:(__kindof UIView *)view;
|
||||
#else
|
||||
- (BOOL)addRenderingDestination:(__kindof NSView *)view;
|
||||
#endif
|
||||
|
||||
@end
|
||||
|
||||
/**
|
||||
* Implementation of RTCMTLRenderer protocol for rendering native nv12 video frames.
|
||||
*/
|
||||
NS_AVAILABLE(10_11, 9_0)
|
||||
@interface RTCMTLRenderer : NSObject<RTCMTLRenderer>
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
244
sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
Normal file
244
sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
Normal file
@ -0,0 +1,244 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCMTLRenderer+Private.h"
|
||||
|
||||
#import <Metal/Metal.h>
|
||||
#import <MetalKit/MetalKit.h>
|
||||
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
#include "webrtc/api/video/video_rotation.h"
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
// As defined in shaderSource.
|
||||
static NSString *const vertexFunctionName = @"vertexPassthrough";
|
||||
static NSString *const fragmentFunctionName = @"fragmentColorConversion";
|
||||
|
||||
static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
|
||||
static NSString *const commandBufferLabel = @"RTCCommandBuffer";
|
||||
static NSString *const renderEncoderLabel = @"RTCEncoder";
|
||||
static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
|
||||
|
||||
static const float cubeVertexData[64] = {
|
||||
-1.0, -1.0, 0.0, 1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0,
|
||||
|
||||
// rotation = 90, offset = 16.
|
||||
-1.0, -1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0,
|
||||
|
||||
// rotation = 180, offset = 32.
|
||||
-1.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, 0.0, -1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0,
|
||||
|
||||
// rotation = 270, offset = 48.
|
||||
-1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 1.0, -1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0,
|
||||
};
|
||||
|
||||
static inline int offsetForRotation(RTCVideoRotation rotation) {
|
||||
switch (rotation) {
|
||||
case RTCVideoRotation_0:
|
||||
return 0;
|
||||
case RTCVideoRotation_90:
|
||||
return 16;
|
||||
case RTCVideoRotation_180:
|
||||
return 32;
|
||||
case RTCVideoRotation_270:
|
||||
return 48;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
// The max number of command buffers in flight (submitted to GPU).
|
||||
// For now setting it up to 1.
|
||||
// In future we might use triple buffering method if it improves performance.
|
||||
static const NSInteger kMaxInflightBuffers = 1;
|
||||
|
||||
@implementation RTCMTLRenderer {
|
||||
__kindof MTKView *_view;
|
||||
|
||||
// Controller.
|
||||
dispatch_semaphore_t _inflight_semaphore;
|
||||
|
||||
// Renderer.
|
||||
id<MTLDevice> _device;
|
||||
id<MTLCommandQueue> _commandQueue;
|
||||
id<MTLLibrary> _defaultLibrary;
|
||||
id<MTLRenderPipelineState> _pipelineState;
|
||||
|
||||
// Buffers.
|
||||
id<MTLBuffer> _vertexBuffer;
|
||||
|
||||
// RTC Frame parameters.
|
||||
int _offset;
|
||||
}
|
||||
|
||||
- (instancetype)init {
|
||||
if (self = [super init]) {
|
||||
// _offset of 0 is equal to rotation of 0.
|
||||
_offset = 0;
|
||||
_inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
|
||||
return [self setupWithView:view];
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (BOOL)setupWithView:(__kindof MTKView *)view {
|
||||
BOOL success = NO;
|
||||
if ([self setupMetal]) {
|
||||
[self setupView:view];
|
||||
[self loadAssets];
|
||||
[self setupBuffers];
|
||||
success = YES;
|
||||
}
|
||||
return success;
|
||||
}
|
||||
#pragma mark - Inheritance
|
||||
|
||||
- (id<MTLDevice>)currentMetalDevice {
|
||||
return _device;
|
||||
}
|
||||
|
||||
- (NSString *)shaderSource {
|
||||
RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
|
||||
return nil;
|
||||
}
|
||||
|
||||
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
|
||||
RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
|
||||
}
|
||||
|
||||
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
|
||||
_offset = offsetForRotation(frame.rotation);
|
||||
return YES;
|
||||
}
|
||||
|
||||
#pragma mark - GPU methods
|
||||
|
||||
- (BOOL)setupMetal {
|
||||
// Set the view to use the default device.
|
||||
_device = MTLCreateSystemDefaultDevice();
|
||||
if (!_device) {
|
||||
return NO;
|
||||
}
|
||||
|
||||
// Create a new command queue.
|
||||
_commandQueue = [_device newCommandQueue];
|
||||
|
||||
// Load metal library from source.
|
||||
NSError *libraryError = nil;
|
||||
|
||||
id<MTLLibrary> sourceLibrary =
|
||||
[_device newLibraryWithSource:[self shaderSource] options:NULL error:&libraryError];
|
||||
|
||||
if (libraryError) {
|
||||
RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
|
||||
return NO;
|
||||
}
|
||||
|
||||
if (!sourceLibrary) {
|
||||
RTCLogError(@"Metal: Failed to load library. %@", libraryError);
|
||||
return NO;
|
||||
}
|
||||
_defaultLibrary = sourceLibrary;
|
||||
|
||||
return YES;
|
||||
}
|
||||
|
||||
- (void)setupView:(__kindof MTKView *)view {
|
||||
view.device = _device;
|
||||
|
||||
view.preferredFramesPerSecond = 30;
|
||||
view.autoResizeDrawable = NO;
|
||||
|
||||
// We need to keep reference to the view as it's needed down the rendering pipeline.
|
||||
_view = view;
|
||||
}
|
||||
|
||||
- (void)loadAssets {
|
||||
id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
|
||||
id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
|
||||
|
||||
MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
|
||||
pipelineDescriptor.label = pipelineDescriptorLabel;
|
||||
pipelineDescriptor.vertexFunction = vertexFunction;
|
||||
pipelineDescriptor.fragmentFunction = fragmentFunction;
|
||||
pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
|
||||
pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
|
||||
NSError *error = nil;
|
||||
_pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];
|
||||
|
||||
if (!_pipelineState) {
|
||||
RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
|
||||
}
|
||||
}
|
||||
|
||||
- (void)setupBuffers {
|
||||
_vertexBuffer = [_device newBufferWithBytes:cubeVertexData
|
||||
length:sizeof(cubeVertexData)
|
||||
options:MTLResourceOptionCPUCacheModeDefault];
|
||||
}
|
||||
|
||||
- (void)render {
|
||||
// Wait until the inflight (curently sent to GPU) command buffer
|
||||
// has completed the GPU work.
|
||||
dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);
|
||||
|
||||
id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
|
||||
commandBuffer.label = commandBufferLabel;
|
||||
|
||||
__block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
|
||||
[commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
|
||||
// GPU work completed.
|
||||
dispatch_semaphore_signal(block_semaphore);
|
||||
}];
|
||||
|
||||
MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor;
|
||||
if (renderPassDescriptor) { // Valid drawable.
|
||||
id<MTLRenderCommandEncoder> renderEncoder =
|
||||
[commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
|
||||
renderEncoder.label = renderEncoderLabel;
|
||||
|
||||
// Set context state.
|
||||
[renderEncoder pushDebugGroup:renderEncoderDebugGroup];
|
||||
[renderEncoder setRenderPipelineState:_pipelineState];
|
||||
[renderEncoder setVertexBuffer:_vertexBuffer offset:_offset * sizeof(float) atIndex:0];
|
||||
[self uploadTexturesToRenderEncoder:renderEncoder];
|
||||
|
||||
[renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
|
||||
vertexStart:0
|
||||
vertexCount:4
|
||||
instanceCount:1];
|
||||
[renderEncoder popDebugGroup];
|
||||
[renderEncoder endEncoding];
|
||||
|
||||
[commandBuffer presentDrawable:_view.currentDrawable];
|
||||
}
|
||||
|
||||
// CPU work is completed, GPU work can be started.
|
||||
[commandBuffer commit];
|
||||
}
|
||||
|
||||
#pragma mark - RTCMTLRenderer
|
||||
|
||||
- (void)drawFrame:(RTCVideoFrame *)frame {
|
||||
@autoreleasepool {
|
||||
if ([self setupTexturesForFrame:frame]) {
|
||||
[self render];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@end
|
||||
152
sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
Normal file
152
sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
Normal file
@ -0,0 +1,152 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCMTLVideoView.h"
|
||||
|
||||
#import <Metal/Metal.h>
|
||||
#import <MetalKit/MetalKit.h>
|
||||
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
#import "WebRTC/RTCVideoFrameBuffer.h"
|
||||
|
||||
#import "RTCMTLI420Renderer.h"
|
||||
#import "RTCMTLNV12Renderer.h"
|
||||
|
||||
// To avoid unreconized symbol linker errors, we're taking advantage of the objc runtime.
|
||||
// Linking errors occur when compiling for architectures that don't support Metal.
|
||||
#define MTKViewClass NSClassFromString(@"MTKView")
|
||||
#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
|
||||
#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
|
||||
|
||||
@interface RTCMTLVideoView () <MTKViewDelegate>
|
||||
@property(nonatomic, strong) RTCMTLI420Renderer *rendererI420;
|
||||
@property(nonatomic, strong) RTCMTLNV12Renderer *rendererNV12;
|
||||
@property(nonatomic, strong) MTKView *metalView;
|
||||
@property(atomic, strong) RTCVideoFrame *videoFrame;
|
||||
@end
|
||||
|
||||
@implementation RTCMTLVideoView
|
||||
|
||||
@synthesize rendererI420 = _rendererI420;
|
||||
@synthesize rendererNV12 = _rendererNV12;
|
||||
@synthesize metalView = _metalView;
|
||||
@synthesize videoFrame = _videoFrame;
|
||||
|
||||
- (instancetype)initWithFrame:(CGRect)frameRect {
|
||||
self = [super initWithFrame:frameRect];
|
||||
if (self) {
|
||||
[self configure];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithCoder:(NSCoder *)aCoder {
|
||||
self = [super initWithCoder:aCoder];
|
||||
if (self) {
|
||||
[self configure];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
+ (BOOL)isMetalAvailable {
|
||||
#if defined(RTC_SUPPORTS_METAL)
|
||||
return YES;
|
||||
#else
|
||||
return NO;
|
||||
#endif
|
||||
}
|
||||
|
||||
+ (MTKView *)createMetalView:(CGRect)frame {
|
||||
MTKView *view = [[MTKViewClass alloc] initWithFrame:frame];
|
||||
return view;
|
||||
}
|
||||
|
||||
+ (RTCMTLNV12Renderer *)createNV12Renderer {
|
||||
return [[RTCMTLNV12RendererClass alloc] init];
|
||||
}
|
||||
|
||||
+ (RTCMTLI420Renderer *)createI420Renderer {
|
||||
return [[RTCMTLI420RendererClass alloc] init];
|
||||
}
|
||||
|
||||
- (void)configure {
|
||||
NSAssert([RTCMTLVideoView isMetalAvailable], @"Metal not availiable on this device");
|
||||
|
||||
_metalView = [RTCMTLVideoView createMetalView:self.bounds];
|
||||
[self configureMetalView];
|
||||
}
|
||||
|
||||
- (void)configureMetalView {
|
||||
if (_metalView) {
|
||||
_metalView.delegate = self;
|
||||
[self addSubview:_metalView];
|
||||
_metalView.contentMode = UIViewContentModeScaleAspectFit;
|
||||
_metalView.translatesAutoresizingMaskIntoConstraints = NO;
|
||||
UILayoutGuide *margins = self.layoutMarginsGuide;
|
||||
[_metalView.topAnchor constraintEqualToAnchor:margins.topAnchor].active = YES;
|
||||
[_metalView.bottomAnchor constraintEqualToAnchor:margins.bottomAnchor].active = YES;
|
||||
[_metalView.leftAnchor constraintEqualToAnchor:margins.leftAnchor].active = YES;
|
||||
[_metalView.rightAnchor constraintEqualToAnchor:margins.rightAnchor].active = YES;
|
||||
}
|
||||
}
|
||||
|
||||
#pragma mark - MTKViewDelegate methods
|
||||
|
||||
- (void)drawInMTKView:(nonnull MTKView *)view {
|
||||
NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
|
||||
if (!self.videoFrame) {
|
||||
return;
|
||||
}
|
||||
|
||||
id<RTCMTLRenderer> renderer = nil;
|
||||
if ([self.videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
|
||||
if (!self.rendererNV12) {
|
||||
self.rendererNV12 = [RTCMTLVideoView createNV12Renderer];
|
||||
if (![self.rendererNV12 addRenderingDestination:self.metalView]) {
|
||||
self.rendererNV12 = nil;
|
||||
RTCLogError(@"Failed to create NV12 renderer");
|
||||
}
|
||||
}
|
||||
renderer = self.rendererNV12;
|
||||
} else {
|
||||
if (!self.rendererI420) {
|
||||
self.rendererI420 = [RTCMTLVideoView createI420Renderer];
|
||||
if (![self.rendererI420 addRenderingDestination:self.metalView]) {
|
||||
self.rendererI420 = nil;
|
||||
RTCLogError(@"Failed to create I420 renderer");
|
||||
}
|
||||
}
|
||||
renderer = self.rendererI420;
|
||||
}
|
||||
|
||||
[renderer drawFrame:self.videoFrame];
|
||||
}
|
||||
|
||||
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
|
||||
}
|
||||
|
||||
#pragma mark - RTCVideoRenderer
|
||||
|
||||
- (void)setSize:(CGSize)size {
|
||||
self.metalView.drawableSize = size;
|
||||
}
|
||||
|
||||
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
|
||||
if (frame == nil) {
|
||||
RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
|
||||
return;
|
||||
}
|
||||
self.videoFrame = frame;
|
||||
}
|
||||
|
||||
@end
|
||||
1
sdk/objc/Framework/Classes/PeerConnection/OWNERS
Normal file
1
sdk/objc/Framework/Classes/PeerConnection/OWNERS
Normal file
@ -0,0 +1 @@
|
||||
deadbeef@webrtc.org
|
||||
@ -0,0 +1,27 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCAVFoundationVideoSource.h"
|
||||
|
||||
#include "avfoundationvideocapturer.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCAVFoundationVideoSource ()
|
||||
|
||||
@property(nonatomic, readonly) webrtc::AVFoundationVideoCapturer *capturer;
|
||||
|
||||
/** Initialize an RTCAVFoundationVideoSource with constraints. */
|
||||
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
|
||||
constraints:(nullable RTCMediaConstraints *)constraints;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,62 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCAVFoundationVideoSource+Private.h"
|
||||
|
||||
#import "RTCMediaConstraints+Private.h"
|
||||
#import "RTCPeerConnectionFactory+Private.h"
|
||||
#import "RTCVideoSource+Private.h"
|
||||
|
||||
@implementation RTCAVFoundationVideoSource {
|
||||
webrtc::AVFoundationVideoCapturer *_capturer;
|
||||
}
|
||||
|
||||
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
|
||||
constraints:(RTCMediaConstraints *)constraints {
|
||||
NSParameterAssert(factory);
|
||||
// We pass ownership of the capturer to the source, but since we own
|
||||
// the source, it should be ok to keep a raw pointer to the
|
||||
// capturer.
|
||||
_capturer = new webrtc::AVFoundationVideoCapturer();
|
||||
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
|
||||
factory.nativeFactory->CreateVideoSource(
|
||||
std::unique_ptr<cricket::VideoCapturer>(_capturer),
|
||||
constraints.nativeConstraints.get());
|
||||
|
||||
return [super initWithNativeVideoSource:source];
|
||||
}
|
||||
|
||||
- (void)adaptOutputFormatToWidth:(int)width
|
||||
height:(int)height
|
||||
fps:(int)fps {
|
||||
self.capturer->AdaptOutputFormat(width, height, fps);
|
||||
}
|
||||
|
||||
- (BOOL)canUseBackCamera {
|
||||
return self.capturer->CanUseBackCamera();
|
||||
}
|
||||
|
||||
- (BOOL)useBackCamera {
|
||||
return self.capturer->GetUseBackCamera();
|
||||
}
|
||||
|
||||
- (void)setUseBackCamera:(BOOL)useBackCamera {
|
||||
self.capturer->SetUseBackCamera(useBackCamera);
|
||||
}
|
||||
|
||||
- (AVCaptureSession *)captureSession {
|
||||
return self.capturer->GetCaptureSession();
|
||||
}
|
||||
|
||||
- (webrtc::AVFoundationVideoCapturer *)capturer {
|
||||
return _capturer;
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,32 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCAudioSource.h"
|
||||
|
||||
#import "RTCMediaSource+Private.h"
|
||||
|
||||
@interface RTCAudioSource ()
|
||||
|
||||
/**
|
||||
* The AudioSourceInterface object passed to this RTCAudioSource during
|
||||
* construction.
|
||||
*/
|
||||
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;
|
||||
|
||||
/** Initialize an RTCAudioSource from a native AudioSourceInterface. */
|
||||
- (instancetype)initWithNativeAudioSource:
|
||||
(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource
|
||||
NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
- (instancetype)initWithNativeMediaSource:
|
||||
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
|
||||
type:(RTCMediaSourceType)type NS_UNAVAILABLE;
|
||||
|
||||
@end
|
||||
48
sdk/objc/Framework/Classes/PeerConnection/RTCAudioSource.mm
Normal file
48
sdk/objc/Framework/Classes/PeerConnection/RTCAudioSource.mm
Normal file
@ -0,0 +1,48 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCAudioSource+Private.h"
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
@implementation RTCAudioSource {
|
||||
}
|
||||
|
||||
@synthesize volume = _volume;
|
||||
@synthesize nativeAudioSource = _nativeAudioSource;
|
||||
|
||||
- (instancetype)initWithNativeAudioSource:
|
||||
(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource {
|
||||
RTC_DCHECK(nativeAudioSource);
|
||||
if (self = [super initWithNativeMediaSource:nativeAudioSource
|
||||
type:RTCMediaSourceTypeAudio]) {
|
||||
_nativeAudioSource = nativeAudioSource;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithNativeMediaSource:
|
||||
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
|
||||
type:(RTCMediaSourceType)type {
|
||||
RTC_NOTREACHED();
|
||||
return nil;
|
||||
}
|
||||
|
||||
- (NSString *)description {
|
||||
NSString *stateString = [[self class] stringForState:self.state];
|
||||
return [NSString stringWithFormat:@"RTCAudioSource( %p ): %@", self, stateString];
|
||||
}
|
||||
|
||||
- (void)setVolume:(double)volume {
|
||||
_volume = volume;
|
||||
_nativeAudioSource->SetVolume(volume);
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,31 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCAudioTrack.h"
|
||||
|
||||
#include "webrtc/api/mediastreaminterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@class RTCPeerConnectionFactory;
|
||||
@interface RTCAudioTrack ()
|
||||
|
||||
/** AudioTrackInterface created or passed in at construction. */
|
||||
@property(nonatomic, readonly)
|
||||
rtc::scoped_refptr<webrtc::AudioTrackInterface> nativeAudioTrack;
|
||||
|
||||
/** Initialize an RTCAudioTrack with an id. */
|
||||
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
|
||||
source:(RTCAudioSource *)source
|
||||
trackId:(NSString *)trackId;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
66
sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm
Normal file
66
sdk/objc/Framework/Classes/PeerConnection/RTCAudioTrack.mm
Normal file
@ -0,0 +1,66 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCAudioTrack+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCAudioSource+Private.h"
|
||||
#import "RTCMediaStreamTrack+Private.h"
|
||||
#import "RTCPeerConnectionFactory+Private.h"
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
@implementation RTCAudioTrack

@synthesize source = _source;

/** Creates a native audio track for |source| via |factory| and wraps it.
 *  All arguments are required; |trackId| must be non-empty. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
                         source:(RTCAudioSource *)source
                        trackId:(NSString *)trackId {
  RTC_DCHECK(factory);
  RTC_DCHECK(source);
  RTC_DCHECK(trackId.length);

  std::string nativeId = [NSString stdStringForString:trackId];
  rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
      factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource);
  // The designated initializer may return a different object than the
  // receiver, so its result must be assigned back to self before it is used;
  // previously the return value was discarded.
  if (self = [self initWithNativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
    _source = source;
  }
  return self;
}

/** Designated wrapper initializer: |nativeTrack| must be an audio track
 *  (asserted), then forwarded to the RTCMediaStreamTrack initializer. */
- (instancetype)initWithNativeTrack:
        (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
                               type:(RTCMediaStreamTrackType)type {
  NSParameterAssert(nativeTrack);
  NSParameterAssert(type == RTCMediaStreamTrackTypeAudio);
  return [super initWithNativeTrack:nativeTrack type:type];
}

/** Lazily wraps the native track's source the first time it is requested and
 *  caches the wrapper; may return nil if the native track has no source. */
- (RTCAudioSource *)source {
  if (!_source) {
    rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
        self.nativeAudioTrack->GetSource();
    if (source) {
      _source = [[RTCAudioSource alloc] initWithNativeAudioSource:source.get()];
    }
  }
  return _source;
}

#pragma mark - Private

/** Downcasts the generic native track (stored by the superclass) to the
 *  audio interface. Safe because init asserted the track type is audio. */
- (rtc::scoped_refptr<webrtc::AudioTrackInterface>)nativeAudioTrack {
  return static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get());
}

@end
|
||||
@ -0,0 +1,447 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
#import "WebRTC/RTCCameraVideoCapturer.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrameBuffer.h"
|
||||
|
||||
#if TARGET_OS_IPHONE
|
||||
#import "WebRTC/UIDevice+RTCDevice.h"
|
||||
#endif
|
||||
|
||||
#import "AVCaptureSession+DevicePosition.h"
|
||||
#import "RTCDispatcher+Private.h"
|
||||
|
||||
const int64_t kNanosecondsPerSecond = 1000000000;
|
||||
|
||||
// Returns YES for the only two pixel formats the capture pipeline currently
// consumes: planar full-range NV12-family 4:2:0 and bi-planar video-range
// 4:2:0. Everything else is filtered out of the supported-format list.
static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
  switch (mediaSubType) {
    case kCVPixelFormatType_420YpCbCr8PlanarFullRange:
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
      return YES;
    default:
      return NO;
  }
}
|
||||
|
||||
// Private class extension: adopts the sample-buffer delegate protocol and
// exposes the serial queue on which camera frames are delivered.
@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
// Lazily-created serial queue targeting the high-priority global queue.
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@end
|
||||
|
||||
// Camera capturer backed by AVFoundation. All mutation of the
// AVCaptureSession and its inputs happens on RTCDispatcher's capture-session
// queue; system notifications re-dispatch their work onto that queue.
@implementation RTCCameraVideoCapturer {
  AVCaptureVideoDataOutput *_videoDataOutput;
  AVCaptureSession *_captureSession;
  AVCaptureDevice *_currentDevice;
  // Set after the first automatic recovery attempt so a second fatal error
  // is only logged, not retried.
  BOOL _hasRetriedOnFatalError;
  // Whether the session is (or should be) running right now.
  // NOTE(review): _willBeRunning is written on the caller's queue while
  // _isRunning is written on the capture-session queue; no synchronization is
  // visible in this file — confirm this is intended.
  BOOL _isRunning;
  // Will the session be running once all asynchronous operations have been completed?
  BOOL _willBeRunning;
#if TARGET_OS_IPHONE
  // Last observed device orientation; used to tag outgoing frames.
  UIDeviceOrientation _orientation;
#endif
}

@synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession;

// Builds the capture session up front (so clients can attach a preview layer
// before capture starts) and registers for all relevant notifications.
// Returns nil if the video data output cannot be added to the session.
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
  if (self = [super initWithDelegate:delegate]) {
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    _orientation = UIDeviceOrientationPortrait;
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

// Asserts the client called stopCapture before releasing the capturer and
// unregisters all notification observers.
- (void)dealloc {
  NSAssert(
      !_willBeRunning,
      @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

// All video capture devices available on this machine.
+ (NSArray<AVCaptureDevice *> *)captureDevices {
  return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}

// Device formats whose pixel format the WebRTC stack can consume
// (see IsMediaSubTypeSupported).
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
  NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats = [NSMutableArray array];

  for (AVCaptureDeviceFormat *format in device.formats) {
    // Filter out subTypes that we currently don't support in the stack
    FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (IsMediaSubTypeSupported(mediaSubType)) {
      [eligibleDeviceFormats addObject:format];
    }
  }

  return eligibleDeviceFormats;
}

// Starts capture asynchronously on the capture-session queue: locks the
// device, swaps it in as the session input, applies format/fps, and starts
// the session. On lock failure the session is left not running.
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps {
  _willBeRunning = YES;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif

                      _currentDevice = device;

                      NSError *error = nil;
                      if (![_currentDevice lockForConfiguration:&error]) {
                        RTCLogError(
                            @"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo);
                        return;
                      }
                      [self reconfigureCaptureSessionInput];
                      [self updateOrientation];
                      [self updateDeviceCaptureFormat:format fps:fps];
                      [_captureSession startRunning];
                      [_currentDevice unlockForConfiguration];
                      _isRunning = YES;
                    }];
}

// Stops capture asynchronously: removes all session inputs, stops the
// session, and (on iOS) stops orientation notifications.
- (void)stopCapture {
  _willBeRunning = NO;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("Stop");
                      _currentDevice = nil;
                      for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
                        [_captureSession removeInput:oldInput];
                      }
                      [_captureSession stopRunning];

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
                      _isRunning = NO;
                    }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
// Re-reads the device orientation on the capture queue so subsequent frames
// carry the correct rotation.
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [self updateOrientation];
                               }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Called on frameQueue for each captured frame: validates the buffer,
// derives the frame rotation from device orientation + camera position, and
// forwards an RTCVideoFrame to the delegate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);

  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (pixelBuffer == nil) {
    return;
  }

#if TARGET_OS_IPHONE
  // Default to portrait orientation on iPhone.
  RTCVideoRotation rotation = RTCVideoRotation_90;
  BOOL usingFrontCamera = NO;
  // Check the image's EXIF for the camera the image came from as the image could have been
  // delayed as we set alwaysDiscardsLateVideoFrames to NO.
  AVCaptureDevicePosition cameraPosition =
      [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
  if (cameraPosition != AVCaptureDevicePositionUnspecified) {
    usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
  } else {
    // Fall back to the connection's input device when EXIF gives no position.
    AVCaptureDeviceInput *deviceInput =
        (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
    usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
  }
  // Front-camera frames are mirrored, so landscape rotations are swapped.
  switch (_orientation) {
    case UIDeviceOrientationPortrait:
      rotation = RTCVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      rotation = RTCVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
      break;
    case UIDeviceOrientationLandscapeRight:
      rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#else
  // No rotation on Mac.
  RTCVideoRotation rotation = RTCVideoRotation_0;
#endif

  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
  int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
      kNanosecondsPerSecond;
  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                           rotation:rotation
                                                        timeStampNs:timeStampNs];
  [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}

// Dropped frames are only logged; no recovery action is taken.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

// Logs the interruption reason (available on iOS 9+); capture is not
// restarted here — the system resumes or ends the interruption.
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  if ([UIDevice isIOS9OrLater]) {
    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
    if (reason) {
      switch (reason.intValue) {
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
          reasonString = @"VideoDeviceNotAvailableInBackground";
          break;
        case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
          reasonString = @"AudioDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
          reasonString = @"VideoDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
          reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
          break;
      }
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
}

// Media-services resets are treated as recoverable; any other runtime error
// goes through the fatal-error path (one automatic retry).
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
                                 if (error.code == AVErrorMediaServicesWereReset) {
                                   [self handleNonFatalError];
                                 } else {
                                   [self handleFatalError];
                                 }
#else
                                 [self handleFatalError];
#endif
                               }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 // If we successfully restarted after an unknown error,
                                 // allow future retries on fatal errors.
                                 _hasRetriedOnFatalError = NO;
                               }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
}

// Attempts one automatic restart after a fatal error; further fatal errors
// are only logged until a successful restart clears the retry flag.
- (void)handleFatalError {
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      if (!_hasRetriedOnFatalError) {
                        RTCLogWarning(@"Attempting to recover from fatal capture error.");
                        [self handleNonFatalError];
                        _hasRetriedOnFatalError = YES;
                      } else {
                        RTCLogError(@"Previous fatal error recovery failed.");
                      }
                    }];
}

// Restarts the session if capture is supposed to be running.
- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 RTCLog(@"Restarting capture session after error.");
                                 if (_isRunning) {
                                   [_captureSession startRunning];
                                 }
                               }];
}

#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

// iOS may stop the session while backgrounded; restart it when the app
// becomes active again and capture is expected to be running.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 if (_isRunning && !_captureSession.isRunning) {
                                   RTCLog(@"Restarting capture session on active.");
                                   [_captureSession startRunning];
                                 }
                               }];
}

#endif  // TARGET_OS_IPHONE

#pragma mark - Private

// Lazily creates the serial frame-delivery queue, targeted at the
// high-priority global queue.
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(_frameQueue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// One-time session construction: creates the session and attaches the video
// data output. Returns NO (capturer init fails) if the output can't be added.
- (BOOL)setupCaptureSession {
  NSAssert(_captureSession == nil, @"Setup capture session called twice.");
  _captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
  _captureSession.usesApplicationAudioSession = NO;
#endif
  [self setupVideoDataOutput];
  // Add the output.
  if (![_captureSession canAddOutput:_videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [_captureSession addOutput:_videoDataOutput];

  return YES;
}

// One-time output construction: NV12 output, late frames kept (EXIF-based
// camera detection in the delegate relies on this), delegate on frameQueue.
- (void)setupVideoDataOutput {
  NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
  // Make the capturer output NV12. Ideally we want I420 but that's not
  // currently supported on iPhone / iPad.
  AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
  videoDataOutput.videoSettings = @{
    (NSString *)
    // TODO(denicija): Remove this color conversion and use the original capture format directly.
    kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
  videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
  [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
  _videoDataOutput = videoDataOutput;
}

#pragma mark - Private, called inside capture queue

// Applies |format| and min-frame-duration (1/fps) to the locked device.
// AVFoundation throws on invalid formats, hence the @try.
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateDeviceCaptureFormat must be called on the capture queue.");
  @try {
    _currentDevice.activeFormat = format;
    _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
  } @catch (NSException *exception) {
    RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
    return;
  }
}

// Replaces all session inputs with an input for _currentDevice, inside a
// begin/commitConfiguration pair so the change is atomic.
- (void)reconfigureCaptureSessionInput {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"reconfigureCaptureSessionInput must be called on the capture queue.");
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
  if (!input) {
    RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
    return;
  }
  [_captureSession beginConfiguration];
  for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
    [_captureSession removeInput:oldInput];
  }
  if ([_captureSession canAddInput:input]) {
    [_captureSession addInput:input];
  } else {
    RTCLogError(@"Cannot add camera as an input to the session.");
  }
  [_captureSession commitConfiguration];
}

// Caches the current UIDevice orientation (no-op on macOS).
- (void)updateOrientation {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateOrientation must be called on the capture queue.");
#if TARGET_OS_IPHONE
  _orientation = [UIDevice currentDevice].orientation;
#endif
}

@end
|
||||
@ -0,0 +1,73 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCConfiguration.h"
|
||||
|
||||
#include "webrtc/api/peerconnectioninterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
// Private category: bidirectional converters between the Objective-C policy
// enums and their webrtc::PeerConnectionInterface counterparts, plus
// string helpers used by -description and native-config plumbing.
@interface RTCConfiguration ()

// ICE transport policy <-> native IceTransportsType.
+ (webrtc::PeerConnectionInterface::IceTransportsType)
    nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy;

+ (RTCIceTransportPolicy)transportPolicyForTransportsType:
    (webrtc::PeerConnectionInterface::IceTransportsType)nativeType;

+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy;

// Bundle policy <-> native BundlePolicy.
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
    (RTCBundlePolicy)policy;

+ (RTCBundlePolicy)bundlePolicyForNativePolicy:
    (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy;

+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy;

// RTCP mux policy <-> native RtcpMuxPolicy.
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
    (RTCRtcpMuxPolicy)policy;

+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
    (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy;

+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy;

// TCP candidate policy <-> native TcpCandidatePolicy.
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)
    nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy;

+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
    (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy;

+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy;

// Candidate network policy <-> native CandidateNetworkPolicy.
+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)
    nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)policy;

+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
    (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy;

+ (NSString *)stringForCandidateNetworkPolicy:(RTCCandidateNetworkPolicy)policy;

// Encryption key type -> native rtc::KeyType.
+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTCEncryptionKeyType)keyType;

/**
 * RTCConfiguration struct representation of this RTCConfiguration. This is
 * needed to pass to the underlying C++ APIs.
 * Caller takes ownership of the returned heap-allocated configuration.
 */
- (webrtc::PeerConnectionInterface::RTCConfiguration *)
    createNativeConfiguration;

/** Designated initializer: populates every property from a native config. */
- (instancetype)initWithNativeConfiguration:
    (const webrtc::PeerConnectionInterface::RTCConfiguration &)config NS_DESIGNATED_INITIALIZER;

@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
390
sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
Normal file
390
sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
Normal file
@ -0,0 +1,390 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCConfiguration+Private.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#import "RTCIceServer+Private.h"
|
||||
#import "RTCIntervalRange+Private.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
#include "webrtc/rtc_base/rtccertificategenerator.h"
|
||||
#include "webrtc/rtc_base/sslidentity.h"
|
||||
|
||||
@implementation RTCConfiguration
|
||||
|
||||
@synthesize iceServers = _iceServers;
|
||||
@synthesize iceTransportPolicy = _iceTransportPolicy;
|
||||
@synthesize bundlePolicy = _bundlePolicy;
|
||||
@synthesize rtcpMuxPolicy = _rtcpMuxPolicy;
|
||||
@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
|
||||
@synthesize candidateNetworkPolicy = _candidateNetworkPolicy;
|
||||
@synthesize continualGatheringPolicy = _continualGatheringPolicy;
|
||||
@synthesize maxIPv6Networks = _maxIPv6Networks;
|
||||
@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
|
||||
@synthesize audioJitterBufferFastAccelerate = _audioJitterBufferFastAccelerate;
|
||||
@synthesize iceConnectionReceivingTimeout = _iceConnectionReceivingTimeout;
|
||||
@synthesize iceBackupCandidatePairPingInterval =
|
||||
_iceBackupCandidatePairPingInterval;
|
||||
@synthesize keyType = _keyType;
|
||||
@synthesize iceCandidatePoolSize = _iceCandidatePoolSize;
|
||||
@synthesize shouldPruneTurnPorts = _shouldPruneTurnPorts;
|
||||
@synthesize shouldPresumeWritableWhenFullyRelayed =
|
||||
_shouldPresumeWritableWhenFullyRelayed;
|
||||
@synthesize iceCheckMinInterval = _iceCheckMinInterval;
|
||||
@synthesize iceRegatherIntervalRange = _iceRegatherIntervalRange;
|
||||
|
||||
/** Default initializer: starts from WebRTC's "aggressive" default native
 *  configuration and maps it through the designated initializer. */
- (instancetype)init {
  // Copy defaults.
  webrtc::PeerConnectionInterface::RTCConfiguration config(
      webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive);
  return [self initWithNativeConfiguration:config];
}
|
||||
|
||||
/** Designated initializer. Copies every field of |config| into the
 *  corresponding Objective-C property, wrapping native sub-objects
 *  (ICE servers, interval range) in their Objective-C counterparts. */
- (instancetype)initWithNativeConfiguration:
    (const webrtc::PeerConnectionInterface::RTCConfiguration &)config {
  if (self = [super init]) {
    NSMutableArray *iceServers = [NSMutableArray array];
    for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) {
      RTCIceServer *iceServer = [[RTCIceServer alloc] initWithNativeServer:server];
      [iceServers addObject:iceServer];
    }
    _iceServers = iceServers;
    _iceTransportPolicy =
        [[self class] transportPolicyForTransportsType:config.type];
    _bundlePolicy =
        [[self class] bundlePolicyForNativePolicy:config.bundle_policy];
    _rtcpMuxPolicy =
        [[self class] rtcpMuxPolicyForNativePolicy:config.rtcp_mux_policy];
    _tcpCandidatePolicy = [[self class] tcpCandidatePolicyForNativePolicy:
        config.tcp_candidate_policy];
    _candidateNetworkPolicy = [[self class]
        candidateNetworkPolicyForNativePolicy:config.candidate_network_policy];
    webrtc::PeerConnectionInterface::ContinualGatheringPolicy nativePolicy =
        config.continual_gathering_policy;
    _continualGatheringPolicy =
        [[self class] continualGatheringPolicyForNativePolicy:nativePolicy];
    _maxIPv6Networks = config.max_ipv6_networks;
    _audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
    _audioJitterBufferFastAccelerate = config.audio_jitter_buffer_fast_accelerate;
    _iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
    _iceBackupCandidatePairPingInterval =
        config.ice_backup_candidate_pair_ping_interval;
    // Note: key type is not taken from |config|; ECDSA is always the default.
    _keyType = RTCEncryptionKeyTypeECDSA;
    _iceCandidatePoolSize = config.ice_candidate_pool_size;
    _shouldPruneTurnPorts = config.prune_turn_ports;
    _shouldPresumeWritableWhenFullyRelayed =
        config.presume_writable_when_fully_relayed;
    // Optional fields: left nil when unset in the native config.
    if (config.ice_check_min_interval) {
      _iceCheckMinInterval =
          [NSNumber numberWithInt:*config.ice_check_min_interval];
    }
    if (config.ice_regather_interval_range) {
      const rtc::IntervalRange &nativeIntervalRange = config.ice_regather_interval_range.value();
      _iceRegatherIntervalRange =
          [[RTCIntervalRange alloc] initWithNativeIntervalRange:nativeIntervalRange];
    }
  }
  return self;
}
|
||||
|
||||
/** Debug description listing every configuration field, one per line.
 *  The format placeholders and arguments are positional: 7 object fields,
 *  7 integer/bool fields, 2 object fields, then maxIPv6Networks — keep the
 *  format string and argument list in lockstep when editing.
 *  NOTE(review): %d is used for fields that may be NSInteger-typed; on LP64
 *  platforms %ld with a (long) cast would be the strictly correct specifier —
 *  confirm the property types before changing the format string. */
- (NSString *)description {
  return
      [NSString stringWithFormat:
          @"RTCConfiguration: "
          @"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n%d\n%@\n%@\n%d\n}\n",
          _iceServers,
          [[self class] stringForTransportPolicy:_iceTransportPolicy],
          [[self class] stringForBundlePolicy:_bundlePolicy],
          [[self class] stringForRtcpMuxPolicy:_rtcpMuxPolicy],
          [[self class] stringForTcpCandidatePolicy:_tcpCandidatePolicy],
          [[self class] stringForCandidateNetworkPolicy:_candidateNetworkPolicy],
          [[self class] stringForContinualGatheringPolicy:_continualGatheringPolicy],
          _audioJitterBufferMaxPackets,
          _audioJitterBufferFastAccelerate,
          _iceConnectionReceivingTimeout,
          _iceBackupCandidatePairPingInterval,
          _iceCandidatePoolSize,
          _shouldPruneTurnPorts,
          _shouldPresumeWritableWhenFullyRelayed,
          _iceCheckMinInterval,
          _iceRegatherIntervalRange,
          _maxIPv6Networks];
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
/** Builds a heap-allocated native RTCConfiguration from this object's
 *  properties. Returns nullptr if certificate generation fails for a
 *  non-default key type. Caller takes ownership of the returned pointer. */
- (webrtc::PeerConnectionInterface::RTCConfiguration *)
    createNativeConfiguration {
  std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
      nativeConfig(new webrtc::PeerConnectionInterface::RTCConfiguration(
          webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive));

  for (RTCIceServer *iceServer in _iceServers) {
    nativeConfig->servers.push_back(iceServer.nativeServer);
  }
  nativeConfig->type =
      [[self class] nativeTransportsTypeForTransportPolicy:_iceTransportPolicy];
  nativeConfig->bundle_policy =
      [[self class] nativeBundlePolicyForPolicy:_bundlePolicy];
  nativeConfig->rtcp_mux_policy =
      [[self class] nativeRtcpMuxPolicyForPolicy:_rtcpMuxPolicy];
  nativeConfig->tcp_candidate_policy =
      [[self class] nativeTcpCandidatePolicyForPolicy:_tcpCandidatePolicy];
  nativeConfig->candidate_network_policy = [[self class]
      nativeCandidateNetworkPolicyForPolicy:_candidateNetworkPolicy];
  nativeConfig->continual_gathering_policy = [[self class]
      nativeContinualGatheringPolicyForPolicy:_continualGatheringPolicy];
  nativeConfig->max_ipv6_networks = _maxIPv6Networks;
  nativeConfig->audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
  nativeConfig->audio_jitter_buffer_fast_accelerate =
      _audioJitterBufferFastAccelerate ? true : false;
  nativeConfig->ice_connection_receiving_timeout =
      _iceConnectionReceivingTimeout;
  nativeConfig->ice_backup_candidate_pair_ping_interval =
      _iceBackupCandidatePairPingInterval;
  rtc::KeyType keyType =
      [[self class] nativeEncryptionKeyTypeForKeyType:_keyType];
  // Generate non-default certificate.
  if (keyType != rtc::KT_DEFAULT) {
    rtc::scoped_refptr<rtc::RTCCertificate> certificate =
        rtc::RTCCertificateGenerator::GenerateCertificate(
            rtc::KeyParams(keyType), rtc::Optional<uint64_t>());
    if (!certificate) {
      RTCLogError(@"Failed to generate certificate.");
      // Abort: a nil return signals configuration failure to the caller.
      return nullptr;
    }
    nativeConfig->certificates.push_back(certificate);
  }
  nativeConfig->ice_candidate_pool_size = _iceCandidatePoolSize;
  nativeConfig->prune_turn_ports = _shouldPruneTurnPorts ? true : false;
  nativeConfig->presume_writable_when_fully_relayed =
      _shouldPresumeWritableWhenFullyRelayed ? true : false;
  // Optional fields are only set when the corresponding property is non-nil.
  if (_iceCheckMinInterval != nil) {
    nativeConfig->ice_check_min_interval =
        rtc::Optional<int>(_iceCheckMinInterval.intValue);
  }
  if (_iceRegatherIntervalRange != nil) {
    std::unique_ptr<rtc::IntervalRange> nativeIntervalRange(
        _iceRegatherIntervalRange.nativeIntervalRange);
    nativeConfig->ice_regather_interval_range =
        rtc::Optional<rtc::IntervalRange>(*nativeIntervalRange);
  }

  return nativeConfig.release();
}
|
||||
|
||||
/** Maps an RTCIceTransportPolicy onto the native IceTransportsType value. */
+ (webrtc::PeerConnectionInterface::IceTransportsType)
    nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)transportPolicy {
  switch (transportPolicy) {
    case RTCIceTransportPolicyNone:
      return webrtc::PeerConnectionInterface::kNone;
    case RTCIceTransportPolicyRelay:
      return webrtc::PeerConnectionInterface::kRelay;
    case RTCIceTransportPolicyNoHost:
      return webrtc::PeerConnectionInterface::kNoHost;
    case RTCIceTransportPolicyAll:
      return webrtc::PeerConnectionInterface::kAll;
  }
}
|
||||
|
||||
/** Maps a native IceTransportsType back onto RTCIceTransportPolicy. */
+ (RTCIceTransportPolicy)transportPolicyForTransportsType:
    (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsType {
  switch (nativeTransportsType) {
    case webrtc::PeerConnectionInterface::kNone:
      return RTCIceTransportPolicyNone;
    case webrtc::PeerConnectionInterface::kRelay:
      return RTCIceTransportPolicyRelay;
    case webrtc::PeerConnectionInterface::kNoHost:
      return RTCIceTransportPolicyNoHost;
    case webrtc::PeerConnectionInterface::kAll:
      return RTCIceTransportPolicyAll;
  }
}
|
||||
|
||||
/** Debug/logging name for an ICE transport policy. */
+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)transportPolicy {
  switch (transportPolicy) {
    case RTCIceTransportPolicyNone:
      return @"NONE";
    case RTCIceTransportPolicyRelay:
      return @"RELAY";
    case RTCIceTransportPolicyNoHost:
      return @"NO_HOST";
    case RTCIceTransportPolicyAll:
      return @"ALL";
  }
}
|
||||
|
||||
/** Maps an RTCBundlePolicy onto the native BundlePolicy value. */
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
    (RTCBundlePolicy)bundlePolicy {
  switch (bundlePolicy) {
    case RTCBundlePolicyBalanced:
      return webrtc::PeerConnectionInterface::kBundlePolicyBalanced;
    case RTCBundlePolicyMaxCompat:
      return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat;
    case RTCBundlePolicyMaxBundle:
      return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle;
  }
}
|
||||
|
||||
/** Maps a native BundlePolicy back onto RTCBundlePolicy. */
+ (RTCBundlePolicy)bundlePolicyForNativePolicy:
    (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicy {
  switch (nativeBundlePolicy) {
    case webrtc::PeerConnectionInterface::kBundlePolicyBalanced:
      return RTCBundlePolicyBalanced;
    case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat:
      return RTCBundlePolicyMaxCompat;
    case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle:
      return RTCBundlePolicyMaxBundle;
  }
}
|
||||
|
||||
/** Debug/logging name for a bundle policy. */
+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)bundlePolicy {
  switch (bundlePolicy) {
    case RTCBundlePolicyBalanced:
      return @"BALANCED";
    case RTCBundlePolicyMaxCompat:
      return @"MAX_COMPAT";
    case RTCBundlePolicyMaxBundle:
      return @"MAX_BUNDLE";
  }
}
|
||||
|
||||
/** Maps an RTCRtcpMuxPolicy onto the native RtcpMuxPolicy value. */
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
    (RTCRtcpMuxPolicy)rtcpMuxPolicy {
  switch (rtcpMuxPolicy) {
    case RTCRtcpMuxPolicyNegotiate:
      return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
    case RTCRtcpMuxPolicyRequire:
      return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire;
  }
}
|
||||
|
||||
/** Maps a native RtcpMuxPolicy back onto RTCRtcpMuxPolicy. */
+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
    (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicy {
  switch (nativeRtcpMuxPolicy) {
    case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate:
      return RTCRtcpMuxPolicyNegotiate;
    case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire:
      return RTCRtcpMuxPolicyRequire;
  }
}
|
||||
|
||||
/** Debug/logging name for an RTCP mux policy. */
+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)rtcpMuxPolicy {
  switch (rtcpMuxPolicy) {
    case RTCRtcpMuxPolicyNegotiate:
      return @"NEGOTIATE";
    case RTCRtcpMuxPolicyRequire:
      return @"REQUIRE";
  }
}
|
||||
|
||||
/** Maps an RTCTcpCandidatePolicy onto the native TcpCandidatePolicy value. */
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)
    nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)tcpCandidatePolicy {
  switch (tcpCandidatePolicy) {
    case RTCTcpCandidatePolicyEnabled:
      return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled;
    case RTCTcpCandidatePolicyDisabled:
      return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled;
  }
}
|
||||
|
||||
/** Maps an RTCCandidateNetworkPolicy onto the native value. */
+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)
    nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)networkPolicy {
  switch (networkPolicy) {
    case RTCCandidateNetworkPolicyAll:
      return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll;
    case RTCCandidateNetworkPolicyLowCost:
      return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
  }
}
|
||||
|
||||
/** Maps a native TcpCandidatePolicy back onto RTCTcpCandidatePolicy. */
+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
    (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeTcpPolicy {
  switch (nativeTcpPolicy) {
    case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled:
      return RTCTcpCandidatePolicyEnabled;
    case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled:
      return RTCTcpCandidatePolicyDisabled;
  }
}
|
||||
|
||||
/** Debug/logging name for a TCP candidate policy. */
+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)tcpPolicy {
  switch (tcpPolicy) {
    case RTCTcpCandidatePolicyEnabled:
      return @"TCP_ENABLED";
    case RTCTcpCandidatePolicyDisabled:
      return @"TCP_DISABLED";
  }
}
|
||||
|
||||
/** Maps a native CandidateNetworkPolicy back onto RTCCandidateNetworkPolicy. */
+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
    (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativeNetworkPolicy {
  switch (nativeNetworkPolicy) {
    case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll:
      return RTCCandidateNetworkPolicyAll;
    case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost:
      return RTCCandidateNetworkPolicyLowCost;
  }
}
|
||||
|
||||
/** Debug/logging name for a candidate network policy. */
+ (NSString *)stringForCandidateNetworkPolicy:
    (RTCCandidateNetworkPolicy)networkPolicy {
  switch (networkPolicy) {
    case RTCCandidateNetworkPolicyAll:
      return @"CANDIDATE_ALL_NETWORKS";
    case RTCCandidateNetworkPolicyLowCost:
      return @"CANDIDATE_LOW_COST_NETWORKS";
  }
}
|
||||
|
||||
/** Maps an RTCContinualGatheringPolicy onto the native value. */
+ (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)
    nativeContinualGatheringPolicyForPolicy:
        (RTCContinualGatheringPolicy)gatheringPolicy {
  switch (gatheringPolicy) {
    case RTCContinualGatheringPolicyGatherOnce:
      return webrtc::PeerConnectionInterface::GATHER_ONCE;
    case RTCContinualGatheringPolicyGatherContinually:
      return webrtc::PeerConnectionInterface::GATHER_CONTINUALLY;
  }
}
|
||||
|
||||
/** Maps a native ContinualGatheringPolicy back onto the ObjC enum. */
+ (RTCContinualGatheringPolicy)continualGatheringPolicyForNativePolicy:
    (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)nativeGatheringPolicy {
  switch (nativeGatheringPolicy) {
    case webrtc::PeerConnectionInterface::GATHER_ONCE:
      return RTCContinualGatheringPolicyGatherOnce;
    case webrtc::PeerConnectionInterface::GATHER_CONTINUALLY:
      return RTCContinualGatheringPolicyGatherContinually;
  }
}
|
||||
|
||||
/** Debug/logging name for a continual gathering policy. */
+ (NSString *)stringForContinualGatheringPolicy:
    (RTCContinualGatheringPolicy)gatheringPolicy {
  switch (gatheringPolicy) {
    case RTCContinualGatheringPolicyGatherOnce:
      return @"GATHER_ONCE";
    case RTCContinualGatheringPolicyGatherContinually:
      return @"GATHER_CONTINUALLY";
  }
}
|
||||
|
||||
/** Maps an RTCEncryptionKeyType onto the native rtc::KeyType value. */
+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:
    (RTCEncryptionKeyType)encryptionKeyType {
  switch (encryptionKeyType) {
    case RTCEncryptionKeyTypeRSA:
      return rtc::KT_RSA;
    case RTCEncryptionKeyTypeECDSA:
      return rtc::KT_ECDSA;
  }
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,49 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCDataChannel.h"
|
||||
|
||||
#include "webrtc/api/datachannelinterface.h"
|
||||
#include "webrtc/rtc_base/scoped_ref_ptr.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCDataBuffer ()

/**
 * The native DataBuffer representation of this RTCDataBuffer object. This is
 * needed to pass to the underlying C++ APIs.
 */
@property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer;

/**
 * Initialize an RTCDataBuffer from a native DataBuffer. The buffer contents
 * are copied; no ownership of |nativeBuffer| is taken.
 */
- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer;

@end
|
||||
|
||||
|
||||
@interface RTCDataChannel ()

/**
 * Initialize an RTCDataChannel from a native DataChannelInterface. The
 * wrapper registers itself as the native channel's observer so delegate
 * callbacks can be forwarded.
 */
- (instancetype)initWithNativeDataChannel:
    (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel
    NS_DESIGNATED_INITIALIZER;

/** Converts an RTCDataChannelState into the native DataState counterpart. */
+ (webrtc::DataChannelInterface::DataState)
    nativeDataChannelStateForState:(RTCDataChannelState)state;

/** Converts a native DataState into the corresponding RTCDataChannelState. */
+ (RTCDataChannelState)dataChannelStateForNativeState:
    (webrtc::DataChannelInterface::DataState)nativeState;

/** Human-readable name for |state|, e.g. @"Open". */
+ (NSString *)stringForState:(RTCDataChannelState)state;

@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
220
sdk/objc/Framework/Classes/PeerConnection/RTCDataChannel.mm
Normal file
220
sdk/objc/Framework/Classes/PeerConnection/RTCDataChannel.mm
Normal file
@ -0,0 +1,220 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCDataChannel+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
namespace webrtc {

// Bridges native DataChannelObserver callbacks over to the Objective-C
// RTCDataChannelDelegate of the wrapping RTCDataChannel.
class DataChannelDelegateAdapter : public DataChannelObserver {
 public:
  DataChannelDelegateAdapter(RTCDataChannel *channel) { channel_ = channel; }

  // Forwards native state changes to the delegate.
  void OnStateChange() override {
    [channel_.delegate dataChannelDidChangeState:channel_];
  }

  // Copies the incoming payload into an RTCDataBuffer and hands it to the
  // delegate.
  void OnMessage(const DataBuffer& buffer) override {
    RTCDataBuffer *data_buffer =
        [[RTCDataBuffer alloc] initWithNativeBuffer:buffer];
    [channel_.delegate dataChannel:channel_
        didReceiveMessageWithBuffer:data_buffer];
  }

  // dataChannel:didChangeBufferedAmount: is optional, so only invoke it when
  // the delegate implements it.
  void OnBufferedAmountChange(uint64_t previousAmount) override {
    id<RTCDataChannelDelegate> delegate = channel_.delegate;
    SEL sel = @selector(dataChannel:didChangeBufferedAmount:);
    if ([delegate respondsToSelector:sel]) {
      [delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount];
    }
  }

 private:
  // Weak back-reference: the RTCDataChannel owns this adapter, so a strong
  // reference here would create a retain cycle.
  __weak RTCDataChannel *channel_;
};
}
|
||||
|
||||
|
||||
@implementation RTCDataBuffer {
  std::unique_ptr<webrtc::DataBuffer> _dataBuffer;
}

/** Designated creation path for application code: copies |data|. */
- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary {
  NSParameterAssert(data);
  self = [super init];
  if (self) {
    // Copy the NSData payload into a native copy-on-write buffer.
    rtc::CopyOnWriteBuffer payload(
        reinterpret_cast<const uint8_t *>(data.bytes), data.length);
    _dataBuffer.reset(new webrtc::DataBuffer(payload, isBinary));
  }
  return self;
}

/** Returns a copy of the payload as NSData. */
- (NSData *)data {
  return [NSData dataWithBytes:_dataBuffer->data.data()
                        length:_dataBuffer->data.size()];
}

/** YES when the buffer holds binary (non-text) data. */
- (BOOL)isBinary {
  return _dataBuffer->binary;
}

#pragma mark - Private

/** Wraps a native buffer by deep copy; |nativeBuffer| is not retained. */
- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer {
  self = [super init];
  if (self) {
    _dataBuffer.reset(new webrtc::DataBuffer(nativeBuffer));
  }
  return self;
}

/** Exposes the owned native buffer for the C++ APIs. */
- (const webrtc::DataBuffer *)nativeDataBuffer {
  return _dataBuffer.get();
}

@end
|
||||
|
||||
|
||||
@implementation RTCDataChannel {
  // Keeps the underlying native channel alive for the wrapper's lifetime.
  rtc::scoped_refptr<webrtc::DataChannelInterface> _nativeDataChannel;
  // Forwards native observer callbacks to |_delegate|.
  std::unique_ptr<webrtc::DataChannelDelegateAdapter> _observer;
  // NOTE(review): removed unused ivar `BOOL _isObserverRegistered` — it was
  // never read or written anywhere in this implementation.
}

@synthesize delegate = _delegate;

- (void)dealloc {
  // Handles unregistering the observer properly. We need to do this because
  // there may still be other references to the underlying data channel.
  _nativeDataChannel->UnregisterObserver();
}

- (NSString *)label {
  return [NSString stringForStdString:_nativeDataChannel->label()];
}

- (BOOL)isReliable {
  return _nativeDataChannel->reliable();
}

- (BOOL)isOrdered {
  return _nativeDataChannel->ordered();
}

// Deprecated alias for maxPacketLifeTime.
- (NSUInteger)maxRetransmitTime {
  return self.maxPacketLifeTime;
}

- (uint16_t)maxPacketLifeTime {
  return _nativeDataChannel->maxRetransmitTime();
}

- (uint16_t)maxRetransmits {
  return _nativeDataChannel->maxRetransmits();
}

- (NSString *)protocol {
  return [NSString stringForStdString:_nativeDataChannel->protocol()];
}

- (BOOL)isNegotiated {
  return _nativeDataChannel->negotiated();
}

// Deprecated alias for channelId.
- (NSInteger)streamId {
  return self.channelId;
}

- (int)channelId {
  return _nativeDataChannel->id();
}

- (RTCDataChannelState)readyState {
  return [[self class] dataChannelStateForNativeState:
      _nativeDataChannel->state()];
}

- (uint64_t)bufferedAmount {
  return _nativeDataChannel->buffered_amount();
}

- (void)close {
  _nativeDataChannel->Close();
}

/** Sends |data| over the channel; returns NO if the native send fails. */
- (BOOL)sendData:(RTCDataBuffer *)data {
  return _nativeDataChannel->Send(*data.nativeDataBuffer);
}

- (NSString *)description {
  return [NSString stringWithFormat:@"RTCDataChannel:\n%ld\n%@\n%@",
                                    (long)self.channelId,
                                    self.label,
                                    [[self class]
                                        stringForState:self.readyState]];
}

#pragma mark - Private

- (instancetype)initWithNativeDataChannel:
    (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel {
  NSParameterAssert(nativeDataChannel);
  if (self = [super init]) {
    _nativeDataChannel = nativeDataChannel;
    // Register the C++ adapter so delegate callbacks are delivered;
    // unregistered again in dealloc.
    _observer.reset(new webrtc::DataChannelDelegateAdapter(self));
    _nativeDataChannel->RegisterObserver(_observer.get());
  }
  return self;
}

+ (webrtc::DataChannelInterface::DataState)
    nativeDataChannelStateForState:(RTCDataChannelState)state {
  switch (state) {
    case RTCDataChannelStateConnecting:
      return webrtc::DataChannelInterface::DataState::kConnecting;
    case RTCDataChannelStateOpen:
      return webrtc::DataChannelInterface::DataState::kOpen;
    case RTCDataChannelStateClosing:
      return webrtc::DataChannelInterface::DataState::kClosing;
    case RTCDataChannelStateClosed:
      return webrtc::DataChannelInterface::DataState::kClosed;
  }
}

+ (RTCDataChannelState)dataChannelStateForNativeState:
    (webrtc::DataChannelInterface::DataState)nativeState {
  switch (nativeState) {
    case webrtc::DataChannelInterface::DataState::kConnecting:
      return RTCDataChannelStateConnecting;
    case webrtc::DataChannelInterface::DataState::kOpen:
      return RTCDataChannelStateOpen;
    case webrtc::DataChannelInterface::DataState::kClosing:
      return RTCDataChannelStateClosing;
    case webrtc::DataChannelInterface::DataState::kClosed:
      return RTCDataChannelStateClosed;
  }
}

+ (NSString *)stringForState:(RTCDataChannelState)state {
  switch (state) {
    case RTCDataChannelStateConnecting:
      return @"Connecting";
    case RTCDataChannelStateOpen:
      return @"Open";
    case RTCDataChannelStateClosing:
      return @"Closing";
    case RTCDataChannelStateClosed:
      return @"Closed";
  }
}

@end
|
||||
@ -0,0 +1,23 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCDataChannelConfiguration.h"
|
||||
|
||||
#include "webrtc/api/datachannelinterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCDataChannelConfiguration ()

/**
 * The native DataChannelInit struct that backs this configuration's
 * properties; passed to the underlying C++ APIs when creating a channel.
 */
@property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit;

@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,83 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCDataChannelConfiguration+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
|
||||
@implementation RTCDataChannelConfiguration

@synthesize nativeDataChannelInit = _nativeDataChannelInit;

// Every property below is a thin accessor over the corresponding field of
// the native webrtc::DataChannelInit struct.

- (BOOL)isOrdered {
  return _nativeDataChannelInit.ordered;
}

- (void)setIsOrdered:(BOOL)ordered {
  _nativeDataChannelInit.ordered = ordered;
}

// Deprecated alias for maxPacketLifeTime.
- (NSInteger)maxRetransmitTimeMs {
  return self.maxPacketLifeTime;
}

- (void)setMaxRetransmitTimeMs:(NSInteger)timeMs {
  self.maxPacketLifeTime = timeMs;
}

- (int)maxPacketLifeTime {
  return _nativeDataChannelInit.maxRetransmitTime;
}

- (void)setMaxPacketLifeTime:(int)lifeTime {
  _nativeDataChannelInit.maxRetransmitTime = lifeTime;
}

- (int)maxRetransmits {
  return _nativeDataChannelInit.maxRetransmits;
}

- (void)setMaxRetransmits:(int)retransmits {
  _nativeDataChannelInit.maxRetransmits = retransmits;
}

- (NSString *)protocol {
  return [NSString stringForStdString:_nativeDataChannelInit.protocol];
}

- (void)setProtocol:(NSString *)protocol {
  _nativeDataChannelInit.protocol = [NSString stdStringForString:protocol];
}

- (BOOL)isNegotiated {
  return _nativeDataChannelInit.negotiated;
}

- (void)setIsNegotiated:(BOOL)negotiated {
  _nativeDataChannelInit.negotiated = negotiated;
}

// Deprecated alias for channelId.
- (int)streamId {
  return self.channelId;
}

- (void)setStreamId:(int)streamId {
  self.channelId = streamId;
}

- (int)channelId {
  return _nativeDataChannelInit.id;
}

- (void)setChannelId:(int)channelId {
  _nativeDataChannelInit.id = channelId;
}

@end
|
||||
83
sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
Normal file
83
sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
Normal file
@ -0,0 +1,83 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoCodec.h"
|
||||
|
||||
#import "RTCVideoCodec+Private.h"
|
||||
|
||||
#include "webrtc/rtc_base/safe_conversions.h"
|
||||
|
||||
@implementation RTCEncodedImage

@synthesize buffer = _buffer;
@synthesize encodedWidth = _encodedWidth;
@synthesize encodedHeight = _encodedHeight;
@synthesize timeStamp = _timeStamp;
@synthesize captureTimeMs = _captureTimeMs;
@synthesize ntpTimeMs = _ntpTimeMs;
@synthesize flags = _flags;
@synthesize encodeStartMs = _encodeStartMs;
@synthesize encodeFinishMs = _encodeFinishMs;
@synthesize frameType = _frameType;
@synthesize rotation = _rotation;
@synthesize completeFrame = _completeFrame;
@synthesize qp = _qp;
@synthesize contentType = _contentType;

/**
 * Wraps a native EncodedImage. The payload bytes are referenced, NOT copied
 * (dataWithBytesNoCopy with freeWhenDone:NO), so the native buffer must
 * outlive this object.
 */
- (instancetype)initWithNativeEncodedImage:(webrtc::EncodedImage)encodedImage {
  if (self = [super init]) {
    // Wrap the buffer in NSData without copying, do not take ownership.
    _buffer = [NSData dataWithBytesNoCopy:encodedImage._buffer
                                   length:encodedImage._length
                             freeWhenDone:NO];
    _encodedWidth = rtc::dchecked_cast<int32_t>(encodedImage._encodedWidth);
    _encodedHeight = rtc::dchecked_cast<int32_t>(encodedImage._encodedHeight);
    _timeStamp = encodedImage._timeStamp;
    _captureTimeMs = encodedImage.capture_time_ms_;
    _ntpTimeMs = encodedImage.ntp_time_ms_;
    _flags = encodedImage.timing_.flags;
    _encodeStartMs = encodedImage.timing_.encode_start_ms;
    _encodeFinishMs = encodedImage.timing_.encode_finish_ms;
    _frameType = static_cast<RTCFrameType>(encodedImage._frameType);
    _rotation = static_cast<RTCVideoRotation>(encodedImage.rotation_);
    _completeFrame = encodedImage._completeFrame;
    // Native code uses qp_ == -1 to mean "unknown"; map that to nil.
    _qp = encodedImage.qp_ == -1 ? nil : @(encodedImage.qp_);
    _contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ?
        RTCVideoContentTypeScreenshare :
        RTCVideoContentTypeUnspecified;
  }

  return self;
}

/**
 * Builds a native EncodedImage whose payload points into |_buffer| without
 * copying; the result is only valid while this object (and its buffer) is
 * alive.
 */
- (webrtc::EncodedImage)nativeEncodedImage {
  // Return the pointer without copying.
  webrtc::EncodedImage encodedImage(
      (uint8_t *)_buffer.bytes, (size_t)_buffer.length, (size_t)_buffer.length);
  encodedImage._encodedWidth = rtc::dchecked_cast<uint32_t>(_encodedWidth);
  encodedImage._encodedHeight = rtc::dchecked_cast<uint32_t>(_encodedHeight);
  encodedImage._timeStamp = _timeStamp;
  encodedImage.capture_time_ms_ = _captureTimeMs;
  encodedImage.ntp_time_ms_ = _ntpTimeMs;
  encodedImage.timing_.flags = _flags;
  encodedImage.timing_.encode_start_ms = _encodeStartMs;
  encodedImage.timing_.encode_finish_ms = _encodeFinishMs;
  encodedImage._frameType = webrtc::FrameType(_frameType);
  encodedImage.rotation_ = webrtc::VideoRotation(_rotation);
  encodedImage._completeFrame = _completeFrame;
  // nil qp maps back to the native "unknown" sentinel of -1.
  encodedImage.qp_ = _qp ? _qp.intValue : -1;
  encodedImage.content_type_ = (_contentType == RTCVideoContentTypeScreenshare) ?
      webrtc::VideoContentType::SCREENSHARE :
      webrtc::VideoContentType::UNSPECIFIED;

  return encodedImage;
}

@end
|
||||
@ -0,0 +1,25 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import <WebRTC/RTCVideoCapturer.h>
|
||||
|
||||
/**
 * RTCVideoCapturer that reads buffers from file.
 *
 * Per design, the file capturer can only be run once and once stopped it cannot run again.
 * To run another file capture session, create new instance of the class.
 */
@interface RTCFileVideoCapturer : RTCVideoCapturer

/**
 * Starts asynchronously reading video frames from the named main-bundle
 * resource (e.g. @"foreman.mp4") and delivering them to the capturer's
 * delegate. Fails silently (with a log message) if the file is not found
 * or a capture session is already reading.
 */
- (void)startCapturingFromFileNamed:(NSString *)nameOfFile;

/** Stops capture. The capturer cannot be restarted afterwards. */
- (void)stopCapture;

@end
|
||||
163
sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
Normal file
163
sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
Normal file
@ -0,0 +1,163 @@
|
||||
/**
|
||||
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCFileVideoCapturer.h"
|
||||
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrameBuffer.h"
|
||||
|
||||
@implementation RTCFileVideoCapturer {
  AVAssetReader *_reader;
  AVAssetReaderTrackOutput *_outTrack;
  BOOL _capturerStopped;
  // Presentation timestamp of the last delivered sample; used to pace frames.
  CMTime _lastPresentationTime;
  dispatch_queue_t _frameQueue;
}

- (void)startCapturingFromFileNamed:(NSString *)nameOfFile {
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    if (_reader && _reader.status == AVAssetReaderStatusReading) {
      // FIX: RTCLog takes an NSString format; these three calls previously
      // passed C string literals ("...") instead of @"...".
      RTCLog(@"Capturer exists and reads another file. Start capture request failed.");
      return;
    }
    NSString *pathForFile = [self pathForFileName:nameOfFile];
    if (!pathForFile) {
      RTCLog(@"File %@ not found in bundle", nameOfFile);
      return;
    }

    // FIX: CMTimeMake(0, 0) is an invalid CMTime (timescale must be
    // positive), which made the first frame's CMTimeSubtract/
    // CMTimeGetSeconds produce NaN. Use a valid zero time instead.
    _lastPresentationTime = CMTimeMake(0, 1);

    NSURL *URLForFile = [NSURL fileURLWithPath:pathForFile];
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:URLForFile options:nil];

    NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    if (error) {
      RTCLog(@"File reader failed with error: %@", error);
      return;
    }

    NSDictionary *options = @{
      (NSString *)
      kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    _outTrack = [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject
                                                 outputSettings:options];
    [_reader addOutput:_outTrack];

    [_reader startReading];
    RTCLog(@"File capturer started reading");
    [self readNextBuffer];
  });
}

- (void)stopCapture {
  _capturerStopped = YES;
  RTCLog(@"File capturer stopped.");
}

#pragma mark - Private

/// Resolves "name.ext" to a main-bundle resource path, or nil.
- (nullable NSString *)pathForFileName:(NSString *)fileName {
  NSArray *nameComponents = [fileName componentsSeparatedByString:@"."];
  if (nameComponents.count != 2) {
    return nil;
  }

  NSString *path =
      [[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
  return path;
}

/// Lazily-created serial queue on which the pacing timers fire.
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue = dispatch_queue_create("org.webrtc.filecapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(_frameQueue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
  }
  return _frameQueue;
}

/// Pulls the next sample from the reader; tears the reader down once the
/// capturer is stopped or the reader leaves the reading state.
- (void)readNextBuffer {
  if (_reader.status != AVAssetReaderStatusReading || _capturerStopped) {
    [_reader cancelReading];
    _reader = nil;
    return;
  }

  CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
  if (!sampleBuffer) {
    [self readNextBuffer];
    return;
  }
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    [self readNextBuffer];
    return;
  }

  [self publishSampleBuffer:sampleBuffer];
}

/// Schedules delivery of |sampleBuffer| after the inter-frame interval
/// implied by its presentation timestamp, then reads the next buffer.
- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
  CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
  Float64 presentationDifference =
      CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
  _lastPresentationTime = presentationTime;
  // FIX: use lround (double) rather than lroundf (float) — nanosecond
  // products exceed float precision.
  int64_t presentationDifferenceRound = lround(presentationDifference * NSEC_PER_SEC);

  __block dispatch_source_t timer = [self createStrictTimer];
  // Strict timer that will fire |presentationDifferenceRound| ns from now and never again.
  dispatch_source_set_timer(timer,
                            dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
                            DISPATCH_TIME_FOREVER,
                            0);
  dispatch_source_set_event_handler(timer, ^{
    dispatch_source_cancel(timer);
    timer = nil;

    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!pixelBuffer) {
      CFRelease(sampleBuffer);
      dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self readNextBuffer];
      });
      return;
    }

    RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
    NSTimeInterval timeStampSeconds = CACurrentMediaTime();
    int64_t timeStampNs = lround(timeStampSeconds * NSEC_PER_SEC);
    RTCVideoFrame *videoFrame =
        [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs];
    CFRelease(sampleBuffer);

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
      [self readNextBuffer];
    });

    [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
  });
  dispatch_activate(timer);
}

/// One-shot strict timer targeting the frame queue.
- (dispatch_source_t)createStrictTimer {
  dispatch_source_t timer = dispatch_source_create(
      DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
  return timer;
}

- (void)dealloc {
  [self stopCapture];
}

@end
|
||||
@ -0,0 +1,37 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCIceCandidate.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "webrtc/api/jsep.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCIceCandidate ()
|
||||
|
||||
/**
|
||||
* The native IceCandidateInterface representation of this RTCIceCandidate
|
||||
* object. This is needed to pass to the underlying C++ APIs.
|
||||
*/
|
||||
@property(nonatomic, readonly)
|
||||
std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate;
|
||||
|
||||
/**
|
||||
* Initialize an RTCIceCandidate from a native IceCandidateInterface. No
|
||||
* ownership is taken of the native candidate.
|
||||
*/
|
||||
- (instancetype)initWithNativeCandidate:
|
||||
(const webrtc::IceCandidateInterface *)candidate;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
76
sdk/objc/Framework/Classes/PeerConnection/RTCIceCandidate.mm
Normal file
76
sdk/objc/Framework/Classes/PeerConnection/RTCIceCandidate.mm
Normal file
@ -0,0 +1,76 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCIceCandidate+Private.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
@implementation RTCIceCandidate
|
||||
|
||||
@synthesize sdpMid = _sdpMid;
|
||||
@synthesize sdpMLineIndex = _sdpMLineIndex;
|
||||
@synthesize sdp = _sdp;
|
||||
@synthesize serverUrl = _serverUrl;
|
||||
|
||||
- (instancetype)initWithSdp:(NSString *)sdp
|
||||
sdpMLineIndex:(int)sdpMLineIndex
|
||||
sdpMid:(NSString *)sdpMid {
|
||||
NSParameterAssert(sdp.length);
|
||||
if (self = [super init]) {
|
||||
_sdpMid = [sdpMid copy];
|
||||
_sdpMLineIndex = sdpMLineIndex;
|
||||
_sdp = [sdp copy];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (NSString *)description {
|
||||
return [NSString stringWithFormat:@"RTCIceCandidate:\n%@\n%d\n%@\n%@",
|
||||
_sdpMid,
|
||||
_sdpMLineIndex,
|
||||
_sdp,
|
||||
_serverUrl];
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (instancetype)initWithNativeCandidate:
|
||||
(const webrtc::IceCandidateInterface *)candidate {
|
||||
NSParameterAssert(candidate);
|
||||
std::string sdp;
|
||||
candidate->ToString(&sdp);
|
||||
|
||||
RTCIceCandidate *rtcCandidate =
|
||||
[self initWithSdp:[NSString stringForStdString:sdp]
|
||||
sdpMLineIndex:candidate->sdp_mline_index()
|
||||
sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
|
||||
rtcCandidate->_serverUrl = [NSString stringForStdString:candidate->server_url()];
|
||||
return rtcCandidate;
|
||||
}
|
||||
|
||||
- (std::unique_ptr<webrtc::IceCandidateInterface>)nativeCandidate {
|
||||
webrtc::SdpParseError error;
|
||||
|
||||
webrtc::IceCandidateInterface *candidate = webrtc::CreateIceCandidate(
|
||||
_sdpMid.stdString, _sdpMLineIndex, _sdp.stdString, &error);
|
||||
|
||||
if (!candidate) {
|
||||
RTCLog(@"Failed to create ICE candidate: %s\nline: %s",
|
||||
error.description.c_str(),
|
||||
error.line.c_str());
|
||||
}
|
||||
|
||||
return std::unique_ptr<webrtc::IceCandidateInterface>(candidate);
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,32 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCIceServer.h"
|
||||
|
||||
#include "webrtc/api/peerconnectioninterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCIceServer ()
|
||||
|
||||
/**
|
||||
* IceServer struct representation of this RTCIceServer object's data.
|
||||
* This is needed to pass to the underlying C++ APIs.
|
||||
*/
|
||||
@property(nonatomic, readonly)
|
||||
webrtc::PeerConnectionInterface::IceServer nativeServer;
|
||||
|
||||
/** Initialize an RTCIceServer from a native IceServer. */
|
||||
- (instancetype)initWithNativeServer:
|
||||
(webrtc::PeerConnectionInterface::IceServer)nativeServer;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
196
sdk/objc/Framework/Classes/PeerConnection/RTCIceServer.mm
Normal file
196
sdk/objc/Framework/Classes/PeerConnection/RTCIceServer.mm
Normal file
@ -0,0 +1,196 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCIceServer+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
|
||||
@implementation RTCIceServer
|
||||
|
||||
@synthesize urlStrings = _urlStrings;
|
||||
@synthesize username = _username;
|
||||
@synthesize credential = _credential;
|
||||
@synthesize tlsCertPolicy = _tlsCertPolicy;
|
||||
@synthesize hostname = _hostname;
|
||||
@synthesize tlsAlpnProtocols = _tlsAlpnProtocols;
|
||||
@synthesize tlsEllipticCurves = _tlsEllipticCurves;
|
||||
|
||||
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings {
|
||||
return [self initWithURLStrings:urlStrings
|
||||
username:nil
|
||||
credential:nil];
|
||||
}
|
||||
|
||||
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
|
||||
username:(NSString *)username
|
||||
credential:(NSString *)credential {
|
||||
return [self initWithURLStrings:urlStrings
|
||||
username:username
|
||||
credential:credential
|
||||
tlsCertPolicy:RTCTlsCertPolicySecure];
|
||||
}
|
||||
|
||||
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
|
||||
username:(NSString *)username
|
||||
credential:(NSString *)credential
|
||||
tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
|
||||
return [self initWithURLStrings:urlStrings
|
||||
username:username
|
||||
credential:credential
|
||||
tlsCertPolicy:tlsCertPolicy
|
||||
hostname:nil];
|
||||
}
|
||||
|
||||
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
|
||||
username:(NSString *)username
|
||||
credential:(NSString *)credential
|
||||
tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
|
||||
hostname:(NSString *)hostname {
|
||||
return [self initWithURLStrings:urlStrings
|
||||
username:username
|
||||
credential:credential
|
||||
tlsCertPolicy:tlsCertPolicy
|
||||
hostname:hostname
|
||||
tlsAlpnProtocols:[NSArray array]];
|
||||
}
|
||||
|
||||
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
|
||||
username:(NSString *)username
|
||||
credential:(NSString *)credential
|
||||
tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
|
||||
hostname:(NSString *)hostname
|
||||
tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols {
|
||||
return [self initWithURLStrings:urlStrings
|
||||
username:username
|
||||
credential:credential
|
||||
tlsCertPolicy:tlsCertPolicy
|
||||
hostname:hostname
|
||||
tlsAlpnProtocols:tlsAlpnProtocols
|
||||
tlsEllipticCurves:[NSArray array]];
|
||||
}
|
||||
|
||||
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
|
||||
username:(NSString *)username
|
||||
credential:(NSString *)credential
|
||||
tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
|
||||
hostname:(NSString *)hostname
|
||||
tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols
|
||||
tlsEllipticCurves:(NSArray<NSString *> *)tlsEllipticCurves {
|
||||
NSParameterAssert(urlStrings.count);
|
||||
if (self = [super init]) {
|
||||
_urlStrings = [[NSArray alloc] initWithArray:urlStrings copyItems:YES];
|
||||
_username = [username copy];
|
||||
_credential = [credential copy];
|
||||
_tlsCertPolicy = tlsCertPolicy;
|
||||
_hostname = [hostname copy];
|
||||
_tlsAlpnProtocols = [[NSArray alloc] initWithArray:tlsAlpnProtocols copyItems:YES];
|
||||
_tlsEllipticCurves = [[NSArray alloc] initWithArray:tlsEllipticCurves copyItems:YES];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (NSString *)description {
|
||||
return [NSString stringWithFormat:@"RTCIceServer:\n%@\n%@\n%@\n%@\n%@\n%@\n%@",
|
||||
_urlStrings,
|
||||
_username,
|
||||
_credential,
|
||||
[self stringForTlsCertPolicy:_tlsCertPolicy],
|
||||
_hostname,
|
||||
_tlsAlpnProtocols,
|
||||
_tlsEllipticCurves];
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (NSString *)stringForTlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
|
||||
switch (tlsCertPolicy) {
|
||||
case RTCTlsCertPolicySecure:
|
||||
return @"RTCTlsCertPolicySecure";
|
||||
case RTCTlsCertPolicyInsecureNoCheck:
|
||||
return @"RTCTlsCertPolicyInsecureNoCheck";
|
||||
}
|
||||
}
|
||||
|
||||
- (webrtc::PeerConnectionInterface::IceServer)nativeServer {
|
||||
__block webrtc::PeerConnectionInterface::IceServer iceServer;
|
||||
|
||||
iceServer.username = [NSString stdStringForString:_username];
|
||||
iceServer.password = [NSString stdStringForString:_credential];
|
||||
iceServer.hostname = [NSString stdStringForString:_hostname];
|
||||
|
||||
[_tlsAlpnProtocols enumerateObjectsUsingBlock:^(NSString *proto, NSUInteger idx, BOOL *stop) {
|
||||
iceServer.tls_alpn_protocols.push_back(proto.stdString);
|
||||
}];
|
||||
|
||||
[_tlsEllipticCurves enumerateObjectsUsingBlock:^(NSString *curve, NSUInteger idx, BOOL *stop) {
|
||||
iceServer.tls_elliptic_curves.push_back(curve.stdString);
|
||||
}];
|
||||
|
||||
[_urlStrings enumerateObjectsUsingBlock:^(NSString *url,
|
||||
NSUInteger idx,
|
||||
BOOL *stop) {
|
||||
iceServer.urls.push_back(url.stdString);
|
||||
}];
|
||||
|
||||
switch (_tlsCertPolicy) {
|
||||
case RTCTlsCertPolicySecure:
|
||||
iceServer.tls_cert_policy =
|
||||
webrtc::PeerConnectionInterface::kTlsCertPolicySecure;
|
||||
break;
|
||||
case RTCTlsCertPolicyInsecureNoCheck:
|
||||
iceServer.tls_cert_policy =
|
||||
webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck;
|
||||
break;
|
||||
}
|
||||
return iceServer;
|
||||
}
|
||||
|
||||
- (instancetype)initWithNativeServer:
|
||||
(webrtc::PeerConnectionInterface::IceServer)nativeServer {
|
||||
NSMutableArray *urls =
|
||||
[NSMutableArray arrayWithCapacity:nativeServer.urls.size()];
|
||||
for (auto const &url : nativeServer.urls) {
|
||||
[urls addObject:[NSString stringForStdString:url]];
|
||||
}
|
||||
NSString *username = [NSString stringForStdString:nativeServer.username];
|
||||
NSString *credential = [NSString stringForStdString:nativeServer.password];
|
||||
NSString *hostname = [NSString stringForStdString:nativeServer.hostname];
|
||||
NSMutableArray *tlsAlpnProtocols =
|
||||
[NSMutableArray arrayWithCapacity:nativeServer.tls_alpn_protocols.size()];
|
||||
for (auto const &proto : nativeServer.tls_alpn_protocols) {
|
||||
[tlsAlpnProtocols addObject:[NSString stringForStdString:proto]];
|
||||
}
|
||||
NSMutableArray *tlsEllipticCurves =
|
||||
[NSMutableArray arrayWithCapacity:nativeServer.tls_elliptic_curves.size()];
|
||||
for (auto const &curve : nativeServer.tls_elliptic_curves) {
|
||||
[tlsEllipticCurves addObject:[NSString stringForStdString:curve]];
|
||||
}
|
||||
RTCTlsCertPolicy tlsCertPolicy;
|
||||
|
||||
switch (nativeServer.tls_cert_policy) {
|
||||
case webrtc::PeerConnectionInterface::kTlsCertPolicySecure:
|
||||
tlsCertPolicy = RTCTlsCertPolicySecure;
|
||||
break;
|
||||
case webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck:
|
||||
tlsCertPolicy = RTCTlsCertPolicyInsecureNoCheck;
|
||||
break;
|
||||
}
|
||||
|
||||
self = [self initWithURLStrings:urls
|
||||
username:username
|
||||
credential:credential
|
||||
tlsCertPolicy:tlsCertPolicy
|
||||
hostname:hostname
|
||||
tlsAlpnProtocols:tlsAlpnProtocols
|
||||
tlsEllipticCurves:tlsEllipticCurves];
|
||||
return self;
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,27 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCIntervalRange.h"
|
||||
|
||||
#include "webrtc/rtc_base/timeutils.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCIntervalRange ()
|
||||
|
||||
@property(nonatomic, readonly)
|
||||
std::unique_ptr<rtc::IntervalRange> nativeIntervalRange;
|
||||
|
||||
- (instancetype)initWithNativeIntervalRange:(const rtc::IntervalRange &)config;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
|
||||
@ -0,0 +1,50 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCIntervalRange+Private.h"
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
@implementation RTCIntervalRange
|
||||
|
||||
@synthesize min = _min;
|
||||
@synthesize max = _max;
|
||||
|
||||
- (instancetype)init {
|
||||
return [self initWithMin:0 max:0];
|
||||
}
|
||||
|
||||
- (instancetype)initWithMin:(NSInteger)min
|
||||
max:(NSInteger)max {
|
||||
RTC_DCHECK_LE(min, max);
|
||||
if (self = [super init]) {
|
||||
_min = min;
|
||||
_max = max;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithNativeIntervalRange:(const rtc::IntervalRange &)config {
|
||||
return [self initWithMin:config.min() max:config.max()];
|
||||
}
|
||||
|
||||
- (NSString *)description {
|
||||
return [NSString stringWithFormat:@"[%ld, %ld]", (long)_min, (long)_max];
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (std::unique_ptr<rtc::IntervalRange>)nativeIntervalRange {
|
||||
std::unique_ptr<rtc::IntervalRange> nativeIntervalRange(
|
||||
new rtc::IntervalRange((int)_min, (int)_max));
|
||||
return nativeIntervalRange;
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,24 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCLegacyStatsReport.h"
|
||||
|
||||
#include "webrtc/api/statstypes.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCLegacyStatsReport ()
|
||||
|
||||
/** Initialize an RTCLegacyStatsReport object from a native StatsReport. */
|
||||
- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,60 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCLegacyStatsReport+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
@implementation RTCLegacyStatsReport
|
||||
|
||||
@synthesize timestamp = _timestamp;
|
||||
@synthesize type = _type;
|
||||
@synthesize reportId = _reportId;
|
||||
@synthesize values = _values;
|
||||
|
||||
- (NSString *)description {
|
||||
return [NSString stringWithFormat:@"RTCLegacyStatsReport:\n%@\n%@\n%f\n%@",
|
||||
_reportId,
|
||||
_type,
|
||||
_timestamp,
|
||||
_values];
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport {
|
||||
if (self = [super init]) {
|
||||
_timestamp = nativeReport.timestamp();
|
||||
_type = [NSString stringForStdString:nativeReport.TypeToString()];
|
||||
_reportId = [NSString stringForStdString:
|
||||
nativeReport.id()->ToString()];
|
||||
|
||||
NSUInteger capacity = nativeReport.values().size();
|
||||
NSMutableDictionary *values =
|
||||
[NSMutableDictionary dictionaryWithCapacity:capacity];
|
||||
for (auto const &valuePair : nativeReport.values()) {
|
||||
NSString *key = [NSString stringForStdString:
|
||||
valuePair.second->display_name()];
|
||||
NSString *value = [NSString stringForStdString:
|
||||
valuePair.second->ToString()];
|
||||
|
||||
// Not expecting duplicate keys.
|
||||
RTC_DCHECK(![values objectForKey:key]);
|
||||
[values setObject:value forKey:key];
|
||||
}
|
||||
_values = values;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,54 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCMediaConstraints.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "webrtc/api/mediaconstraintsinterface.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class MediaConstraints : public MediaConstraintsInterface {
|
||||
public:
|
||||
virtual ~MediaConstraints();
|
||||
MediaConstraints();
|
||||
MediaConstraints(
|
||||
const MediaConstraintsInterface::Constraints& mandatory,
|
||||
const MediaConstraintsInterface::Constraints& optional);
|
||||
virtual const Constraints& GetMandatory() const;
|
||||
virtual const Constraints& GetOptional() const;
|
||||
|
||||
private:
|
||||
MediaConstraintsInterface::Constraints mandatory_;
|
||||
MediaConstraintsInterface::Constraints optional_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCMediaConstraints ()
|
||||
|
||||
/**
|
||||
* A MediaConstraints representation of this RTCMediaConstraints object. This is
|
||||
* needed to pass to the underlying C++ APIs.
|
||||
*/
|
||||
- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints;
|
||||
|
||||
/** Return a native Constraints object representing these constraints */
|
||||
+ (webrtc::MediaConstraintsInterface::Constraints)
|
||||
nativeConstraintsForConstraints:
|
||||
(NSDictionary<NSString *, NSString *> *)constraints;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
138
sdk/objc/Framework/Classes/PeerConnection/RTCMediaConstraints.mm
Normal file
138
sdk/objc/Framework/Classes/PeerConnection/RTCMediaConstraints.mm
Normal file
@ -0,0 +1,138 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCMediaConstraints+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
NSString * const kRTCMediaConstraintsMinAspectRatio =
|
||||
@(webrtc::MediaConstraintsInterface::kMinAspectRatio);
|
||||
NSString * const kRTCMediaConstraintsMaxAspectRatio =
|
||||
@(webrtc::MediaConstraintsInterface::kMaxAspectRatio);
|
||||
NSString * const kRTCMediaConstraintsMinWidth =
|
||||
@(webrtc::MediaConstraintsInterface::kMinWidth);
|
||||
NSString * const kRTCMediaConstraintsMaxWidth =
|
||||
@(webrtc::MediaConstraintsInterface::kMaxWidth);
|
||||
NSString * const kRTCMediaConstraintsMinHeight =
|
||||
@(webrtc::MediaConstraintsInterface::kMinHeight);
|
||||
NSString * const kRTCMediaConstraintsMaxHeight =
|
||||
@(webrtc::MediaConstraintsInterface::kMaxHeight);
|
||||
NSString * const kRTCMediaConstraintsMinFrameRate =
|
||||
@(webrtc::MediaConstraintsInterface::kMinFrameRate);
|
||||
NSString * const kRTCMediaConstraintsMaxFrameRate =
|
||||
@(webrtc::MediaConstraintsInterface::kMaxFrameRate);
|
||||
NSString * const kRTCMediaConstraintsLevelControl =
|
||||
@(webrtc::MediaConstraintsInterface::kLevelControl);
|
||||
NSString * const kRTCMediaConstraintsAudioNetworkAdaptorConfig =
|
||||
@(webrtc::MediaConstraintsInterface::kAudioNetworkAdaptorConfig);
|
||||
|
||||
NSString * const kRTCMediaConstraintsIceRestart =
|
||||
@(webrtc::MediaConstraintsInterface::kIceRestart);
|
||||
NSString * const kRTCMediaConstraintsOfferToReceiveAudio =
|
||||
@(webrtc::MediaConstraintsInterface::kOfferToReceiveAudio);
|
||||
NSString * const kRTCMediaConstraintsOfferToReceiveVideo =
|
||||
@(webrtc::MediaConstraintsInterface::kOfferToReceiveVideo);
|
||||
NSString * const kRTCMediaConstraintsVoiceActivityDetection =
|
||||
@(webrtc::MediaConstraintsInterface::kVoiceActivityDetection);
|
||||
|
||||
NSString * const kRTCMediaConstraintsValueTrue =
|
||||
@(webrtc::MediaConstraintsInterface::kValueTrue);
|
||||
NSString * const kRTCMediaConstraintsValueFalse =
|
||||
@(webrtc::MediaConstraintsInterface::kValueFalse);
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
MediaConstraints::~MediaConstraints() {}
|
||||
|
||||
MediaConstraints::MediaConstraints() {}
|
||||
|
||||
MediaConstraints::MediaConstraints(
|
||||
const MediaConstraintsInterface::Constraints& mandatory,
|
||||
const MediaConstraintsInterface::Constraints& optional)
|
||||
: mandatory_(mandatory), optional_(optional) {}
|
||||
|
||||
const MediaConstraintsInterface::Constraints&
|
||||
MediaConstraints::GetMandatory() const {
|
||||
return mandatory_;
|
||||
}
|
||||
|
||||
const MediaConstraintsInterface::Constraints&
|
||||
MediaConstraints::GetOptional() const {
|
||||
return optional_;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
|
||||
@implementation RTCMediaConstraints {
|
||||
NSDictionary<NSString *, NSString *> *_mandatory;
|
||||
NSDictionary<NSString *, NSString *> *_optional;
|
||||
}
|
||||
|
||||
- (instancetype)initWithMandatoryConstraints:
|
||||
(NSDictionary<NSString *, NSString *> *)mandatory
|
||||
optionalConstraints:
|
||||
(NSDictionary<NSString *, NSString *> *)optional {
|
||||
if (self = [super init]) {
|
||||
_mandatory = [[NSDictionary alloc] initWithDictionary:mandatory
|
||||
copyItems:YES];
|
||||
_optional = [[NSDictionary alloc] initWithDictionary:optional
|
||||
copyItems:YES];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (NSString *)description {
|
||||
return [NSString stringWithFormat:@"RTCMediaConstraints:\n%@\n%@",
|
||||
_mandatory,
|
||||
_optional];
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints {
|
||||
webrtc::MediaConstraintsInterface::Constraints mandatory =
|
||||
[[self class] nativeConstraintsForConstraints:_mandatory];
|
||||
webrtc::MediaConstraintsInterface::Constraints optional =
|
||||
[[self class] nativeConstraintsForConstraints:_optional];
|
||||
|
||||
webrtc::MediaConstraints *nativeConstraints =
|
||||
new webrtc::MediaConstraints(mandatory, optional);
|
||||
return std::unique_ptr<webrtc::MediaConstraints>(nativeConstraints);
|
||||
}
|
||||
|
||||
+ (webrtc::MediaConstraintsInterface::Constraints)
|
||||
nativeConstraintsForConstraints:
|
||||
(NSDictionary<NSString *, NSString *> *)constraints {
|
||||
webrtc::MediaConstraintsInterface::Constraints nativeConstraints;
|
||||
for (NSString *key in constraints) {
|
||||
NSAssert([key isKindOfClass:[NSString class]],
|
||||
@"%@ is not an NSString.", key);
|
||||
NSString *value = [constraints objectForKey:key];
|
||||
NSAssert([value isKindOfClass:[NSString class]],
|
||||
@"%@ is not an NSString.", value);
|
||||
if ([kRTCMediaConstraintsAudioNetworkAdaptorConfig isEqualToString:key]) {
|
||||
// This value is base64 encoded.
|
||||
NSData *charData = [[NSData alloc] initWithBase64EncodedString:value options:0];
|
||||
std::string configValue =
|
||||
std::string(reinterpret_cast<const char *>(charData.bytes), charData.length);
|
||||
nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
|
||||
key.stdString, configValue));
|
||||
} else {
|
||||
nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
|
||||
key.stdString, value.stdString));
|
||||
}
|
||||
}
|
||||
return nativeConstraints;
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,42 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCMediaSource.h"
|
||||
|
||||
#include "webrtc/api/mediastreaminterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
typedef NS_ENUM(NSInteger, RTCMediaSourceType) {
|
||||
RTCMediaSourceTypeAudio,
|
||||
RTCMediaSourceTypeVideo,
|
||||
};
|
||||
|
||||
@interface RTCMediaSource ()
|
||||
|
||||
@property(nonatomic, readonly)
|
||||
rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
|
||||
|
||||
- (instancetype)initWithNativeMediaSource:
|
||||
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
|
||||
type:(RTCMediaSourceType)type
|
||||
NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
+ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
|
||||
(RTCSourceState)state;
|
||||
|
||||
+ (RTCSourceState)sourceStateForNativeState:
|
||||
(webrtc::MediaSourceInterface::SourceState)nativeState;
|
||||
|
||||
+ (NSString *)stringForState:(RTCSourceState)state;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
79
sdk/objc/Framework/Classes/PeerConnection/RTCMediaSource.mm
Normal file
79
sdk/objc/Framework/Classes/PeerConnection/RTCMediaSource.mm
Normal file
@ -0,0 +1,79 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCMediaSource+Private.h"
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
@implementation RTCMediaSource {
|
||||
RTCMediaSourceType _type;
|
||||
}
|
||||
|
||||
@synthesize nativeMediaSource = _nativeMediaSource;
|
||||
|
||||
- (instancetype)initWithNativeMediaSource:
|
||||
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
|
||||
type:(RTCMediaSourceType)type {
|
||||
RTC_DCHECK(nativeMediaSource);
|
||||
if (self = [super init]) {
|
||||
_nativeMediaSource = nativeMediaSource;
|
||||
_type = type;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (RTCSourceState)state {
|
||||
return [[self class] sourceStateForNativeState:_nativeMediaSource->state()];
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
+ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
|
||||
(RTCSourceState)state {
|
||||
switch (state) {
|
||||
case RTCSourceStateInitializing:
|
||||
return webrtc::MediaSourceInterface::kInitializing;
|
||||
case RTCSourceStateLive:
|
||||
return webrtc::MediaSourceInterface::kLive;
|
||||
case RTCSourceStateEnded:
|
||||
return webrtc::MediaSourceInterface::kEnded;
|
||||
case RTCSourceStateMuted:
|
||||
return webrtc::MediaSourceInterface::kMuted;
|
||||
}
|
||||
}
|
||||
|
||||
+ (RTCSourceState)sourceStateForNativeState:
|
||||
(webrtc::MediaSourceInterface::SourceState)nativeState {
|
||||
switch (nativeState) {
|
||||
case webrtc::MediaSourceInterface::kInitializing:
|
||||
return RTCSourceStateInitializing;
|
||||
case webrtc::MediaSourceInterface::kLive:
|
||||
return RTCSourceStateLive;
|
||||
case webrtc::MediaSourceInterface::kEnded:
|
||||
return RTCSourceStateEnded;
|
||||
case webrtc::MediaSourceInterface::kMuted:
|
||||
return RTCSourceStateMuted;
|
||||
}
|
||||
}
|
||||
|
||||
+ (NSString *)stringForState:(RTCSourceState)state {
|
||||
switch (state) {
|
||||
case RTCSourceStateInitializing:
|
||||
return @"Initializing";
|
||||
case RTCSourceStateLive:
|
||||
return @"Live";
|
||||
case RTCSourceStateEnded:
|
||||
return @"Ended";
|
||||
case RTCSourceStateMuted:
|
||||
return @"Muted";
|
||||
}
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,36 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCMediaStream.h"
|
||||
|
||||
#include "webrtc/api/mediastreaminterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCMediaStream ()
|
||||
|
||||
/**
|
||||
* MediaStreamInterface representation of this RTCMediaStream object. This is
|
||||
* needed to pass to the underlying C++ APIs.
|
||||
*/
|
||||
@property(nonatomic, readonly)
|
||||
rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;
|
||||
|
||||
/** Initialize an RTCMediaStream with an id. */
|
||||
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
|
||||
streamId:(NSString *)streamId;
|
||||
|
||||
/** Initialize an RTCMediaStream from a native MediaStreamInterface. */
|
||||
- (instancetype)initWithNativeMediaStream:
|
||||
(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
122
sdk/objc/Framework/Classes/PeerConnection/RTCMediaStream.mm
Normal file
122
sdk/objc/Framework/Classes/PeerConnection/RTCMediaStream.mm
Normal file
@ -0,0 +1,122 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCMediaStream+Private.h"
|
||||
|
||||
#include <vector>
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCAudioTrack+Private.h"
|
||||
#import "RTCMediaStreamTrack+Private.h"
|
||||
#import "RTCPeerConnectionFactory+Private.h"
|
||||
#import "RTCVideoTrack+Private.h"
|
||||
|
||||
@implementation RTCMediaStream {
|
||||
NSMutableArray *_audioTracks;
|
||||
NSMutableArray *_videoTracks;
|
||||
rtc::scoped_refptr<webrtc::MediaStreamInterface> _nativeMediaStream;
|
||||
}
|
||||
|
||||
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
|
||||
streamId:(NSString *)streamId {
|
||||
NSParameterAssert(factory);
|
||||
NSParameterAssert(streamId.length);
|
||||
std::string nativeId = [NSString stdStringForString:streamId];
|
||||
rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
|
||||
factory.nativeFactory->CreateLocalMediaStream(nativeId);
|
||||
return [self initWithNativeMediaStream:stream];
|
||||
}
|
||||
|
||||
- (NSArray<RTCAudioTrack *> *)audioTracks {
|
||||
return [_audioTracks copy];
|
||||
}
|
||||
|
||||
- (NSArray<RTCVideoTrack *> *)videoTracks {
|
||||
return [_videoTracks copy];
|
||||
}
|
||||
|
||||
- (NSString *)streamId {
|
||||
return [NSString stringForStdString:_nativeMediaStream->label()];
|
||||
}
|
||||
|
||||
- (void)addAudioTrack:(RTCAudioTrack *)audioTrack {
|
||||
if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) {
|
||||
[_audioTracks addObject:audioTrack];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)addVideoTrack:(RTCVideoTrack *)videoTrack {
|
||||
if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) {
|
||||
[_videoTracks addObject:videoTrack];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack {
|
||||
NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack];
|
||||
NSAssert(index != NSNotFound,
|
||||
@"|removeAudioTrack| called on unexpected RTCAudioTrack");
|
||||
if (index != NSNotFound &&
|
||||
_nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
|
||||
[_audioTracks removeObjectAtIndex:index];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack {
|
||||
NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack];
|
||||
NSAssert(index != NSNotFound,
|
||||
@"|removeVideoTrack| called on unexpected RTCVideoTrack");
|
||||
if (index != NSNotFound &&
|
||||
_nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
|
||||
[_videoTracks removeObjectAtIndex:index];
|
||||
}
|
||||
}
|
||||
|
||||
- (NSString *)description {
|
||||
return [NSString stringWithFormat:@"RTCMediaStream:\n%@\nA=%lu\nV=%lu",
|
||||
self.streamId,
|
||||
(unsigned long)self.audioTracks.count,
|
||||
(unsigned long)self.videoTracks.count];
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
|
||||
return _nativeMediaStream;
|
||||
}
|
||||
|
||||
- (instancetype)initWithNativeMediaStream:
|
||||
(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
|
||||
NSParameterAssert(nativeMediaStream);
|
||||
if (self = [super init]) {
|
||||
webrtc::AudioTrackVector audioTracks = nativeMediaStream->GetAudioTracks();
|
||||
webrtc::VideoTrackVector videoTracks = nativeMediaStream->GetVideoTracks();
|
||||
|
||||
_audioTracks = [NSMutableArray arrayWithCapacity:audioTracks.size()];
|
||||
_videoTracks = [NSMutableArray arrayWithCapacity:videoTracks.size()];
|
||||
_nativeMediaStream = nativeMediaStream;
|
||||
|
||||
for (auto &track : audioTracks) {
|
||||
RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio;
|
||||
RTCAudioTrack *audioTrack =
|
||||
[[RTCAudioTrack alloc] initWithNativeTrack:track type:type];
|
||||
[_audioTracks addObject:audioTrack];
|
||||
}
|
||||
|
||||
for (auto &track : videoTracks) {
|
||||
RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo;
|
||||
RTCVideoTrack *videoTrack =
|
||||
[[RTCVideoTrack alloc] initWithNativeTrack:track type:type];
|
||||
[_videoTracks addObject:videoTrack];
|
||||
}
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,54 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCMediaStreamTrack.h"
|
||||
|
||||
#include "webrtc/api/mediastreaminterface.h"
|
||||
|
||||
typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) {
|
||||
RTCMediaStreamTrackTypeAudio,
|
||||
RTCMediaStreamTrackTypeVideo,
|
||||
};
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCMediaStreamTrack ()
|
||||
|
||||
/**
|
||||
* The native MediaStreamTrackInterface passed in or created during
|
||||
* construction.
|
||||
*/
|
||||
@property(nonatomic, readonly)
|
||||
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack;
|
||||
|
||||
/**
|
||||
* Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface.
|
||||
*/
|
||||
- (instancetype)initWithNativeTrack:
|
||||
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
|
||||
type:(RTCMediaStreamTrackType)type
|
||||
NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
- (instancetype)initWithNativeTrack:
|
||||
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack;
|
||||
|
||||
- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track;
|
||||
|
||||
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
|
||||
(RTCMediaStreamTrackState)state;
|
||||
|
||||
+ (RTCMediaStreamTrackState)trackStateForNativeState:
|
||||
(webrtc::MediaStreamTrackInterface::TrackState)nativeState;
|
||||
|
||||
+ (NSString *)stringForState:(RTCMediaStreamTrackState)state;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
137
sdk/objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack.mm
Normal file
137
sdk/objc/Framework/Classes/PeerConnection/RTCMediaStreamTrack.mm
Normal file
@ -0,0 +1,137 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCMediaStreamTrack+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
|
||||
NSString * const kRTCMediaStreamTrackKindAudio =
|
||||
@(webrtc::MediaStreamTrackInterface::kAudioKind);
|
||||
NSString * const kRTCMediaStreamTrackKindVideo =
|
||||
@(webrtc::MediaStreamTrackInterface::kVideoKind);
|
||||
|
||||
@implementation RTCMediaStreamTrack {
|
||||
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
|
||||
RTCMediaStreamTrackType _type;
|
||||
}
|
||||
|
||||
- (NSString *)kind {
|
||||
return [NSString stringForStdString:_nativeTrack->kind()];
|
||||
}
|
||||
|
||||
- (NSString *)trackId {
|
||||
return [NSString stringForStdString:_nativeTrack->id()];
|
||||
}
|
||||
|
||||
- (BOOL)isEnabled {
|
||||
return _nativeTrack->enabled();
|
||||
}
|
||||
|
||||
- (void)setIsEnabled:(BOOL)isEnabled {
|
||||
_nativeTrack->set_enabled(isEnabled);
|
||||
}
|
||||
|
||||
- (RTCMediaStreamTrackState)readyState {
|
||||
return [[self class] trackStateForNativeState:_nativeTrack->state()];
|
||||
}
|
||||
|
||||
- (NSString *)description {
|
||||
NSString *readyState = [[self class] stringForState:self.readyState];
|
||||
return [NSString stringWithFormat:@"RTCMediaStreamTrack:\n%@\n%@\n%@\n%@",
|
||||
self.kind,
|
||||
self.trackId,
|
||||
self.isEnabled ? @"enabled" : @"disabled",
|
||||
readyState];
|
||||
}
|
||||
|
||||
- (BOOL)isEqual:(id)object {
|
||||
if (self == object) {
|
||||
return YES;
|
||||
}
|
||||
if (![object isMemberOfClass:[self class]]) {
|
||||
return NO;
|
||||
}
|
||||
return [self isEqualToTrack:(RTCMediaStreamTrack *)object];
|
||||
}
|
||||
|
||||
- (NSUInteger)hash {
|
||||
return (NSUInteger)_nativeTrack.get();
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
|
||||
return _nativeTrack;
|
||||
}
|
||||
|
||||
- (instancetype)initWithNativeTrack:
|
||||
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
|
||||
type:(RTCMediaStreamTrackType)type {
|
||||
NSParameterAssert(nativeTrack);
|
||||
if (self = [super init]) {
|
||||
_nativeTrack = nativeTrack;
|
||||
_type = type;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithNativeTrack:
|
||||
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
|
||||
NSParameterAssert(nativeTrack);
|
||||
if (nativeTrack->kind() ==
|
||||
std::string(webrtc::MediaStreamTrackInterface::kAudioKind)) {
|
||||
return [self initWithNativeTrack:nativeTrack
|
||||
type:RTCMediaStreamTrackTypeAudio];
|
||||
}
|
||||
if (nativeTrack->kind() ==
|
||||
std::string(webrtc::MediaStreamTrackInterface::kVideoKind)) {
|
||||
return [self initWithNativeTrack:nativeTrack
|
||||
type:RTCMediaStreamTrackTypeVideo];
|
||||
}
|
||||
return nil;
|
||||
}
|
||||
|
||||
- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track {
|
||||
if (!track) {
|
||||
return NO;
|
||||
}
|
||||
return _nativeTrack == track.nativeTrack;
|
||||
}
|
||||
|
||||
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
|
||||
(RTCMediaStreamTrackState)state {
|
||||
switch (state) {
|
||||
case RTCMediaStreamTrackStateLive:
|
||||
return webrtc::MediaStreamTrackInterface::kLive;
|
||||
case RTCMediaStreamTrackStateEnded:
|
||||
return webrtc::MediaStreamTrackInterface::kEnded;
|
||||
}
|
||||
}
|
||||
|
||||
+ (RTCMediaStreamTrackState)trackStateForNativeState:
|
||||
(webrtc::MediaStreamTrackInterface::TrackState)nativeState {
|
||||
switch (nativeState) {
|
||||
case webrtc::MediaStreamTrackInterface::kLive:
|
||||
return RTCMediaStreamTrackStateLive;
|
||||
case webrtc::MediaStreamTrackInterface::kEnded:
|
||||
return RTCMediaStreamTrackStateEnded;
|
||||
}
|
||||
}
|
||||
|
||||
+ (NSString *)stringForState:(RTCMediaStreamTrackState)state {
|
||||
switch (state) {
|
||||
case RTCMediaStreamTrackStateLive:
|
||||
return @"Live";
|
||||
case RTCMediaStreamTrackStateEnded:
|
||||
return @"Ended";
|
||||
}
|
||||
}
|
||||
|
||||
@end
|
||||
32
sdk/objc/Framework/Classes/PeerConnection/RTCMetrics.mm
Normal file
32
sdk/objc/Framework/Classes/PeerConnection/RTCMetrics.mm
Normal file
@ -0,0 +1,32 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCMetrics.h"
|
||||
|
||||
#import "RTCMetricsSampleInfo+Private.h"
|
||||
|
||||
void RTCEnableMetrics() {
|
||||
webrtc::metrics::Enable();
|
||||
}
|
||||
|
||||
NSArray<RTCMetricsSampleInfo *> *RTCGetAndResetMetrics() {
|
||||
std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>>
|
||||
histograms;
|
||||
webrtc::metrics::GetAndReset(&histograms);
|
||||
|
||||
NSMutableArray *metrics =
|
||||
[NSMutableArray arrayWithCapacity:histograms.size()];
|
||||
for (auto const &histogram : histograms) {
|
||||
RTCMetricsSampleInfo *metric = [[RTCMetricsSampleInfo alloc]
|
||||
initWithNativeSampleInfo:*histogram.second];
|
||||
[metrics addObject:metric];
|
||||
}
|
||||
return metrics;
|
||||
}
|
||||
@ -0,0 +1,28 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCMetricsSampleInfo.h"
|
||||
|
||||
// Adding 'nogncheck' to disable the gn include headers check.
|
||||
// We don't want to depend on 'system_wrappers:metrics_default' because
|
||||
// clients should be able to provide their own implementation.
|
||||
#include "webrtc/system_wrappers/include/metrics_default.h" // nogncheck
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCMetricsSampleInfo ()
|
||||
|
||||
/** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */
|
||||
- (instancetype)initWithNativeSampleInfo:
|
||||
(const webrtc::metrics::SampleInfo &)info;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,43 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCMetricsSampleInfo+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
|
||||
@implementation RTCMetricsSampleInfo
|
||||
|
||||
@synthesize name = _name;
|
||||
@synthesize min = _min;
|
||||
@synthesize max = _max;
|
||||
@synthesize bucketCount = _bucketCount;
|
||||
@synthesize samples = _samples;
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (instancetype)initWithNativeSampleInfo:
|
||||
(const webrtc::metrics::SampleInfo &)info {
|
||||
if (self = [super init]) {
|
||||
_name = [NSString stringForStdString:info.name];
|
||||
_min = info.min;
|
||||
_max = info.max;
|
||||
_bucketCount = info.bucket_count;
|
||||
|
||||
NSMutableDictionary *samples =
|
||||
[NSMutableDictionary dictionaryWithCapacity:info.samples.size()];
|
||||
for (auto const &sample : info.samples) {
|
||||
[samples setObject:@(sample.second) forKey:@(sample.first)];
|
||||
}
|
||||
_samples = samples;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,34 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCPeerConnection+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCDataChannel+Private.h"
|
||||
#import "RTCDataChannelConfiguration+Private.h"
|
||||
|
||||
@implementation RTCPeerConnection (DataChannel)
|
||||
|
||||
- (RTCDataChannel *)dataChannelForLabel:(NSString *)label
|
||||
configuration:
|
||||
(RTCDataChannelConfiguration *)configuration {
|
||||
std::string labelString = [NSString stdStringForString:label];
|
||||
const webrtc::DataChannelInit nativeInit =
|
||||
configuration.nativeDataChannelInit;
|
||||
rtc::scoped_refptr<webrtc::DataChannelInterface> dataChannel =
|
||||
self.nativePeerConnection->CreateDataChannel(labelString,
|
||||
&nativeInit);
|
||||
if (!dataChannel) {
|
||||
return nil;
|
||||
}
|
||||
return [[RTCDataChannel alloc] initWithNativeDataChannel:dataChannel];
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,107 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCPeerConnection.h"
|
||||
|
||||
#include "webrtc/api/peerconnectioninterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
/**
|
||||
* These objects are created by RTCPeerConnectionFactory to wrap an
|
||||
* id<RTCPeerConnectionDelegate> and call methods on that interface.
|
||||
*/
|
||||
class PeerConnectionDelegateAdapter : public PeerConnectionObserver {
|
||||
|
||||
public:
|
||||
PeerConnectionDelegateAdapter(RTCPeerConnection *peerConnection);
|
||||
virtual ~PeerConnectionDelegateAdapter();
|
||||
|
||||
void OnSignalingChange(
|
||||
PeerConnectionInterface::SignalingState new_state) override;
|
||||
|
||||
void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
|
||||
|
||||
void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
|
||||
|
||||
void OnDataChannel(
|
||||
rtc::scoped_refptr<DataChannelInterface> data_channel) override;
|
||||
|
||||
void OnRenegotiationNeeded() override;
|
||||
|
||||
void OnIceConnectionChange(
|
||||
PeerConnectionInterface::IceConnectionState new_state) override;
|
||||
|
||||
void OnIceGatheringChange(
|
||||
PeerConnectionInterface::IceGatheringState new_state) override;
|
||||
|
||||
void OnIceCandidate(const IceCandidateInterface *candidate) override;
|
||||
|
||||
void OnIceCandidatesRemoved(
|
||||
const std::vector<cricket::Candidate>& candidates) override;
|
||||
|
||||
private:
|
||||
__weak RTCPeerConnection *peer_connection_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
|
||||
@interface RTCPeerConnection ()
|
||||
|
||||
/** The native PeerConnectionInterface created during construction. */
|
||||
@property(nonatomic, readonly)
|
||||
rtc::scoped_refptr<webrtc::PeerConnectionInterface> nativePeerConnection;
|
||||
|
||||
/** Initialize an RTCPeerConnection with a configuration, constraints, and
|
||||
* delegate.
|
||||
*/
|
||||
- (instancetype)initWithFactory:
|
||||
(RTCPeerConnectionFactory *)factory
|
||||
configuration:
|
||||
(RTCConfiguration *)configuration
|
||||
constraints:
|
||||
(RTCMediaConstraints *)constraints
|
||||
delegate:
|
||||
(nullable id<RTCPeerConnectionDelegate>)delegate
|
||||
NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
+ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
|
||||
(RTCSignalingState)state;
|
||||
|
||||
+ (RTCSignalingState)signalingStateForNativeState:
|
||||
(webrtc::PeerConnectionInterface::SignalingState)nativeState;
|
||||
|
||||
+ (NSString *)stringForSignalingState:(RTCSignalingState)state;
|
||||
|
||||
+ (webrtc::PeerConnectionInterface::IceConnectionState)
|
||||
nativeIceConnectionStateForState:(RTCIceConnectionState)state;
|
||||
|
||||
+ (RTCIceConnectionState)iceConnectionStateForNativeState:
|
||||
(webrtc::PeerConnectionInterface::IceConnectionState)nativeState;
|
||||
|
||||
+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state;
|
||||
|
||||
+ (webrtc::PeerConnectionInterface::IceGatheringState)
|
||||
nativeIceGatheringStateForState:(RTCIceGatheringState)state;
|
||||
|
||||
+ (RTCIceGatheringState)iceGatheringStateForNativeState:
|
||||
(webrtc::PeerConnectionInterface::IceGatheringState)nativeState;
|
||||
|
||||
+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state;
|
||||
|
||||
+ (webrtc::PeerConnectionInterface::StatsOutputLevel)
|
||||
nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,64 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCPeerConnection+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCMediaStreamTrack+Private.h"
|
||||
#import "RTCLegacyStatsReport+Private.h"
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class StatsObserverAdapter : public StatsObserver {
|
||||
public:
|
||||
StatsObserverAdapter(void (^completionHandler)
|
||||
(NSArray<RTCLegacyStatsReport *> *stats)) {
|
||||
completion_handler_ = completionHandler;
|
||||
}
|
||||
|
||||
~StatsObserverAdapter() {
|
||||
completion_handler_ = nil;
|
||||
}
|
||||
|
||||
void OnComplete(const StatsReports& reports) override {
|
||||
RTC_DCHECK(completion_handler_);
|
||||
NSMutableArray *stats = [NSMutableArray arrayWithCapacity:reports.size()];
|
||||
for (const auto* report : reports) {
|
||||
RTCLegacyStatsReport *statsReport =
|
||||
[[RTCLegacyStatsReport alloc] initWithNativeReport:*report];
|
||||
[stats addObject:statsReport];
|
||||
}
|
||||
completion_handler_(stats);
|
||||
completion_handler_ = nil;
|
||||
}
|
||||
|
||||
private:
|
||||
void (^completion_handler_)(NSArray<RTCLegacyStatsReport *> *stats);
|
||||
};
|
||||
} // namespace webrtc
|
||||
|
||||
@implementation RTCPeerConnection (Stats)
|
||||
|
||||
- (void)statsForTrack:(RTCMediaStreamTrack *)mediaStreamTrack
|
||||
statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
|
||||
completionHandler:
|
||||
(void (^)(NSArray<RTCLegacyStatsReport *> *stats))completionHandler {
|
||||
rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer(
|
||||
new rtc::RefCountedObject<webrtc::StatsObserverAdapter>
|
||||
(completionHandler));
|
||||
webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
|
||||
[[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
|
||||
self.nativePeerConnection->GetStats(
|
||||
observer, mediaStreamTrack.nativeTrack, nativeOutputLevel);
|
||||
}
|
||||
|
||||
@end
|
||||
617
sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
Normal file
617
sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
Normal file
@ -0,0 +1,617 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCPeerConnection+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCConfiguration+Private.h"
|
||||
#import "RTCDataChannel+Private.h"
|
||||
#import "RTCIceCandidate+Private.h"
|
||||
#import "RTCLegacyStatsReport+Private.h"
|
||||
#import "RTCMediaConstraints+Private.h"
|
||||
#import "RTCMediaStream+Private.h"
|
||||
#import "RTCPeerConnectionFactory+Private.h"
|
||||
#import "RTCRtpReceiver+Private.h"
|
||||
#import "RTCRtpSender+Private.h"
|
||||
#import "RTCSessionDescription+Private.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "webrtc/api/jsepicecandidate.h"
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
NSString * const kRTCPeerConnectionErrorDomain =
|
||||
@"org.webrtc.RTCPeerConnection";
|
||||
int const kRTCPeerConnnectionSessionDescriptionError = -1;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class CreateSessionDescriptionObserverAdapter
|
||||
: public CreateSessionDescriptionObserver {
|
||||
public:
|
||||
CreateSessionDescriptionObserverAdapter(
|
||||
void (^completionHandler)(RTCSessionDescription *sessionDescription,
|
||||
NSError *error)) {
|
||||
completion_handler_ = completionHandler;
|
||||
}
|
||||
|
||||
~CreateSessionDescriptionObserverAdapter() {
|
||||
completion_handler_ = nil;
|
||||
}
|
||||
|
||||
void OnSuccess(SessionDescriptionInterface *desc) override {
|
||||
RTC_DCHECK(completion_handler_);
|
||||
std::unique_ptr<webrtc::SessionDescriptionInterface> description =
|
||||
std::unique_ptr<webrtc::SessionDescriptionInterface>(desc);
|
||||
RTCSessionDescription* session =
|
||||
[[RTCSessionDescription alloc] initWithNativeDescription:
|
||||
description.get()];
|
||||
completion_handler_(session, nil);
|
||||
completion_handler_ = nil;
|
||||
}
|
||||
|
||||
void OnFailure(const std::string& error) override {
|
||||
RTC_DCHECK(completion_handler_);
|
||||
NSString* str = [NSString stringForStdString:error];
|
||||
NSError* err =
|
||||
[NSError errorWithDomain:kRTCPeerConnectionErrorDomain
|
||||
code:kRTCPeerConnnectionSessionDescriptionError
|
||||
userInfo:@{ NSLocalizedDescriptionKey : str }];
|
||||
completion_handler_(nil, err);
|
||||
completion_handler_ = nil;
|
||||
}
|
||||
|
||||
private:
|
||||
void (^completion_handler_)
|
||||
(RTCSessionDescription *sessionDescription, NSError *error);
|
||||
};
|
||||
|
||||
class SetSessionDescriptionObserverAdapter :
|
||||
public SetSessionDescriptionObserver {
|
||||
public:
|
||||
SetSessionDescriptionObserverAdapter(void (^completionHandler)
|
||||
(NSError *error)) {
|
||||
completion_handler_ = completionHandler;
|
||||
}
|
||||
|
||||
~SetSessionDescriptionObserverAdapter() {
|
||||
completion_handler_ = nil;
|
||||
}
|
||||
|
||||
void OnSuccess() override {
|
||||
RTC_DCHECK(completion_handler_);
|
||||
completion_handler_(nil);
|
||||
completion_handler_ = nil;
|
||||
}
|
||||
|
||||
void OnFailure(const std::string& error) override {
|
||||
RTC_DCHECK(completion_handler_);
|
||||
NSString* str = [NSString stringForStdString:error];
|
||||
NSError* err =
|
||||
[NSError errorWithDomain:kRTCPeerConnectionErrorDomain
|
||||
code:kRTCPeerConnnectionSessionDescriptionError
|
||||
userInfo:@{ NSLocalizedDescriptionKey : str }];
|
||||
completion_handler_(err);
|
||||
completion_handler_ = nil;
|
||||
}
|
||||
|
||||
private:
|
||||
void (^completion_handler_)(NSError *error);
|
||||
};
|
||||
|
||||
PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(
|
||||
RTCPeerConnection *peerConnection) {
|
||||
peer_connection_ = peerConnection;
|
||||
}
|
||||
|
||||
PeerConnectionDelegateAdapter::~PeerConnectionDelegateAdapter() {
|
||||
peer_connection_ = nil;
|
||||
}
|
||||
|
||||
void PeerConnectionDelegateAdapter::OnSignalingChange(
|
||||
PeerConnectionInterface::SignalingState new_state) {
|
||||
RTCSignalingState state =
|
||||
[[RTCPeerConnection class] signalingStateForNativeState:new_state];
|
||||
RTCPeerConnection *peer_connection = peer_connection_;
|
||||
[peer_connection.delegate peerConnection:peer_connection
|
||||
didChangeSignalingState:state];
|
||||
}
|
||||
|
||||
void PeerConnectionDelegateAdapter::OnAddStream(
|
||||
rtc::scoped_refptr<MediaStreamInterface> stream) {
|
||||
RTCMediaStream *mediaStream =
|
||||
[[RTCMediaStream alloc] initWithNativeMediaStream:stream];
|
||||
RTCPeerConnection *peer_connection = peer_connection_;
|
||||
[peer_connection.delegate peerConnection:peer_connection
|
||||
didAddStream:mediaStream];
|
||||
}
|
||||
|
||||
void PeerConnectionDelegateAdapter::OnRemoveStream(
|
||||
rtc::scoped_refptr<MediaStreamInterface> stream) {
|
||||
RTCMediaStream *mediaStream =
|
||||
[[RTCMediaStream alloc] initWithNativeMediaStream:stream];
|
||||
RTCPeerConnection *peer_connection = peer_connection_;
|
||||
[peer_connection.delegate peerConnection:peer_connection
|
||||
didRemoveStream:mediaStream];
|
||||
}
|
||||
|
||||
void PeerConnectionDelegateAdapter::OnDataChannel(
|
||||
rtc::scoped_refptr<DataChannelInterface> data_channel) {
|
||||
RTCDataChannel *dataChannel =
|
||||
[[RTCDataChannel alloc] initWithNativeDataChannel:data_channel];
|
||||
RTCPeerConnection *peer_connection = peer_connection_;
|
||||
[peer_connection.delegate peerConnection:peer_connection
|
||||
didOpenDataChannel:dataChannel];
|
||||
}
|
||||
|
||||
void PeerConnectionDelegateAdapter::OnRenegotiationNeeded() {
|
||||
RTCPeerConnection *peer_connection = peer_connection_;
|
||||
[peer_connection.delegate peerConnectionShouldNegotiate:peer_connection];
|
||||
}
|
||||
|
||||
void PeerConnectionDelegateAdapter::OnIceConnectionChange(
|
||||
PeerConnectionInterface::IceConnectionState new_state) {
|
||||
RTCIceConnectionState state =
|
||||
[[RTCPeerConnection class] iceConnectionStateForNativeState:new_state];
|
||||
RTCPeerConnection *peer_connection = peer_connection_;
|
||||
[peer_connection.delegate peerConnection:peer_connection
|
||||
didChangeIceConnectionState:state];
|
||||
}
|
||||
|
||||
void PeerConnectionDelegateAdapter::OnIceGatheringChange(
|
||||
PeerConnectionInterface::IceGatheringState new_state) {
|
||||
RTCIceGatheringState state =
|
||||
[[RTCPeerConnection class] iceGatheringStateForNativeState:new_state];
|
||||
RTCPeerConnection *peer_connection = peer_connection_;
|
||||
[peer_connection.delegate peerConnection:peer_connection
|
||||
didChangeIceGatheringState:state];
|
||||
}
|
||||
|
||||
void PeerConnectionDelegateAdapter::OnIceCandidate(
|
||||
const IceCandidateInterface *candidate) {
|
||||
RTCIceCandidate *iceCandidate =
|
||||
[[RTCIceCandidate alloc] initWithNativeCandidate:candidate];
|
||||
RTCPeerConnection *peer_connection = peer_connection_;
|
||||
[peer_connection.delegate peerConnection:peer_connection
|
||||
didGenerateIceCandidate:iceCandidate];
|
||||
}
|
||||
|
||||
void PeerConnectionDelegateAdapter::OnIceCandidatesRemoved(
|
||||
const std::vector<cricket::Candidate>& candidates) {
|
||||
NSMutableArray* ice_candidates =
|
||||
[NSMutableArray arrayWithCapacity:candidates.size()];
|
||||
for (const auto& candidate : candidates) {
|
||||
std::unique_ptr<JsepIceCandidate> candidate_wrapper(
|
||||
new JsepIceCandidate(candidate.transport_name(), -1, candidate));
|
||||
RTCIceCandidate* ice_candidate = [[RTCIceCandidate alloc]
|
||||
initWithNativeCandidate:candidate_wrapper.get()];
|
||||
[ice_candidates addObject:ice_candidate];
|
||||
}
|
||||
RTCPeerConnection* peer_connection = peer_connection_;
|
||||
[peer_connection.delegate peerConnection:peer_connection
|
||||
didRemoveIceCandidates:ice_candidates];
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
|
||||
@implementation RTCPeerConnection {
  // Streams added through -addStream:; backs the -localStreams getter.
  NSMutableArray<RTCMediaStream *> *_localStreams;
  // C++ adapter that forwards native observer callbacks to _delegate.
  std::unique_ptr<webrtc::PeerConnectionDelegateAdapter> _observer;
  // The underlying native peer connection, created in the initializer.
  rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
  // Constraints captured at init; re-applied in -setConfiguration:.
  std::unique_ptr<webrtc::MediaConstraints> _nativeConstraints;
  // YES after a successful -startRtcEventLogWithFilePath:maxSizeInBytes:.
  BOOL _hasStartedRtcEventLog;
}

@synthesize delegate = _delegate;

// Designated initializer. Returns nil when the configuration cannot be
// converted to its native form, or when the native factory fails to create
// a peer connection.
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
                  configuration:(RTCConfiguration *)configuration
                    constraints:(RTCMediaConstraints *)constraints
                       delegate:(id<RTCPeerConnectionDelegate>)delegate {
  NSParameterAssert(factory);
  std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
      [configuration createNativeConfiguration]);
  if (!config) {
    return nil;
  }
  if (self = [super init]) {
    _observer.reset(new webrtc::PeerConnectionDelegateAdapter(self));
    _nativeConstraints = constraints.nativeConstraints;
    // Constraints are folded into the native configuration rather than
    // passed to CreatePeerConnection separately.
    CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(),
                                        config.get());
    _peerConnection =
        factory.nativeFactory->CreatePeerConnection(*config,
                                                    nullptr,
                                                    nullptr,
                                                    _observer.get());
    if (!_peerConnection) {
      return nil;
    }
    _localStreams = [[NSMutableArray alloc] init];
    _delegate = delegate;
  }
  return self;
}

// Returns an immutable snapshot of the locally added streams.
- (NSArray<RTCMediaStream *> *)localStreams {
  return [_localStreams copy];
}

// Wraps the current native local description, or nil if none is set.
- (RTCSessionDescription *)localDescription {
  const webrtc::SessionDescriptionInterface *description =
      _peerConnection->local_description();
  return description ?
      [[RTCSessionDescription alloc] initWithNativeDescription:description]
      : nil;
}

// Wraps the current native remote description, or nil if none is set.
- (RTCSessionDescription *)remoteDescription {
  const webrtc::SessionDescriptionInterface *description =
      _peerConnection->remote_description();
  return description ?
      [[RTCSessionDescription alloc] initWithNativeDescription:description]
      : nil;
}

- (RTCSignalingState)signalingState {
  return [[self class]
      signalingStateForNativeState:_peerConnection->signaling_state()];
}

- (RTCIceConnectionState)iceConnectionState {
  return [[self class] iceConnectionStateForNativeState:
      _peerConnection->ice_connection_state()];
}

- (RTCIceGatheringState)iceGatheringState {
  return [[self class] iceGatheringStateForNativeState:
      _peerConnection->ice_gathering_state()];
}

// Applies a new configuration, merging in the constraints captured at init.
// Returns NO if the configuration cannot be converted or is rejected by the
// native layer.
- (BOOL)setConfiguration:(RTCConfiguration *)configuration {
  std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
      [configuration createNativeConfiguration]);
  if (!config) {
    return NO;
  }
  CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(),
                                      config.get());
  return _peerConnection->SetConfiguration(*config);
}

- (RTCConfiguration *)configuration {
  webrtc::PeerConnectionInterface::RTCConfiguration config =
      _peerConnection->GetConfiguration();
  return [[RTCConfiguration alloc] initWithNativeConfiguration:config];
}

- (void)close {
  _peerConnection->Close();
}

// NOTE(review): assumes -nativeCandidate returns a newly allocated object
// whose ownership transfers to this method (released by the unique_ptr) —
// confirm against RTCIceCandidate+Private.
- (void)addIceCandidate:(RTCIceCandidate *)candidate {
  std::unique_ptr<const webrtc::IceCandidateInterface> iceCandidate(
      candidate.nativeCandidate);
  _peerConnection->AddIceCandidate(iceCandidate.get());
}

// Converts the ObjC candidates back to cricket::Candidates and asks the
// native layer to remove them; candidates without a native form are skipped.
- (void)removeIceCandidates:(NSArray<RTCIceCandidate *> *)iceCandidates {
  std::vector<cricket::Candidate> candidates;
  for (RTCIceCandidate *iceCandidate in iceCandidates) {
    std::unique_ptr<const webrtc::IceCandidateInterface> candidate(
        iceCandidate.nativeCandidate);
    if (candidate) {
      candidates.push_back(candidate->candidate());
      // Need to fill the transport name from the sdp_mid.
      candidates.back().set_transport_name(candidate->sdp_mid());
    }
  }
  if (!candidates.empty()) {
    _peerConnection->RemoveIceCandidates(candidates);
  }
}

// Adds a local stream; only recorded in _localStreams when the native layer
// accepted it.
- (void)addStream:(RTCMediaStream *)stream {
  if (!_peerConnection->AddStream(stream.nativeMediaStream)) {
    RTCLogError(@"Failed to add stream: %@", stream);
    return;
  }
  [_localStreams addObject:stream];
}

- (void)removeStream:(RTCMediaStream *)stream {
  _peerConnection->RemoveStream(stream.nativeMediaStream);
  [_localStreams removeObject:stream];
}

// Asynchronously creates an SDP offer; completionHandler receives either a
// session description or an error (invoked by the observer adapter).
- (void)offerForConstraints:(RTCMediaConstraints *)constraints
          completionHandler:
    (void (^)(RTCSessionDescription *sessionDescription,
              NSError *error))completionHandler {
  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
      observer(new rtc::RefCountedObject
          <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
  _peerConnection->CreateOffer(observer, constraints.nativeConstraints.get());
}

// Asynchronously creates an SDP answer; mirrors -offerForConstraints:….
- (void)answerForConstraints:(RTCMediaConstraints *)constraints
           completionHandler:
    (void (^)(RTCSessionDescription *sessionDescription,
              NSError *error))completionHandler {
  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
      observer(new rtc::RefCountedObject
          <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
  _peerConnection->CreateAnswer(observer, constraints.nativeConstraints.get());
}

- (void)setLocalDescription:(RTCSessionDescription *)sdp
          completionHandler:(void (^)(NSError *error))completionHandler {
  rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
      new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
          completionHandler));
  _peerConnection->SetLocalDescription(observer, sdp.nativeDescription);
}

- (void)setRemoteDescription:(RTCSessionDescription *)sdp
           completionHandler:(void (^)(NSError *error))completionHandler {
  rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
      new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
          completionHandler));
  _peerConnection->SetRemoteDescription(observer, sdp.nativeDescription);
}

// Sets bandwidth-estimation bitrate limits; nil arguments leave the
// corresponding limit unset. Returns YES when the native layer accepts.
- (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps
          currentBitrateBps:(nullable NSNumber *)currentBitrateBps
              maxBitrateBps:(nullable NSNumber *)maxBitrateBps {
  webrtc::PeerConnectionInterface::BitrateParameters params;
  if (minBitrateBps != nil) {
    params.min_bitrate_bps = rtc::Optional<int>(minBitrateBps.intValue);
  }
  if (currentBitrateBps != nil) {
    params.current_bitrate_bps = rtc::Optional<int>(currentBitrateBps.intValue);
  }
  if (maxBitrateBps != nil) {
    params.max_bitrate_bps = rtc::Optional<int>(maxBitrateBps.intValue);
  }
  return _peerConnection->SetBitrate(params).ok();
}

// Starts RTC event logging into filePath (owner read/write, truncated).
// The opened fd is handed to the native layer. Returns NO when logging is
// already running or the file cannot be opened.
- (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath
                      maxSizeInBytes:(int64_t)maxSizeInBytes {
  RTC_DCHECK(filePath.length);
  RTC_DCHECK_GT(maxSizeInBytes, 0);
  RTC_DCHECK(!_hasStartedRtcEventLog);
  if (_hasStartedRtcEventLog) {
    RTCLogError(@"Event logging already started.");
    return NO;
  }
  int fd = open(filePath.UTF8String, O_WRONLY | O_CREAT | O_TRUNC,
                S_IRUSR | S_IWUSR);
  if (fd < 0) {
    RTCLogError(@"Error opening file: %@. Error: %d", filePath, errno);
    return NO;
  }
  _hasStartedRtcEventLog =
      _peerConnection->StartRtcEventLog(fd, maxSizeInBytes);
  return _hasStartedRtcEventLog;
}

- (void)stopRtcEventLog {
  _peerConnection->StopRtcEventLog();
  _hasStartedRtcEventLog = NO;
}

// Creates an RTP sender of the given media kind, or nil on failure.
- (RTCRtpSender *)senderWithKind:(NSString *)kind
                        streamId:(NSString *)streamId {
  std::string nativeKind = [NSString stdStringForString:kind];
  std::string nativeStreamId = [NSString stdStringForString:streamId];
  rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeSender(
      _peerConnection->CreateSender(nativeKind, nativeStreamId));
  return nativeSender ?
      [[RTCRtpSender alloc] initWithNativeRtpSender:nativeSender]
      : nil;
}

// Wraps the current set of native RTP senders.
- (NSArray<RTCRtpSender *> *)senders {
  std::vector<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenders(
      _peerConnection->GetSenders());
  NSMutableArray *senders = [[NSMutableArray alloc] init];
  for (const auto &nativeSender : nativeSenders) {
    RTCRtpSender *sender =
        [[RTCRtpSender alloc] initWithNativeRtpSender:nativeSender];
    [senders addObject:sender];
  }
  return senders;
}

// Wraps the current set of native RTP receivers.
- (NSArray<RTCRtpReceiver *> *)receivers {
  std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> nativeReceivers(
      _peerConnection->GetReceivers());
  NSMutableArray *receivers = [[NSMutableArray alloc] init];
  for (const auto &nativeReceiver : nativeReceivers) {
    RTCRtpReceiver *receiver =
        [[RTCRtpReceiver alloc] initWithNativeRtpReceiver:nativeReceiver];
    [receivers addObject:receiver];
  }
  return receivers;
}

#pragma mark - Private

// ObjC -> native signaling state.
+ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
    (RTCSignalingState)state {
  switch (state) {
    case RTCSignalingStateStable:
      return webrtc::PeerConnectionInterface::kStable;
    case RTCSignalingStateHaveLocalOffer:
      return webrtc::PeerConnectionInterface::kHaveLocalOffer;
    case RTCSignalingStateHaveLocalPrAnswer:
      return webrtc::PeerConnectionInterface::kHaveLocalPrAnswer;
    case RTCSignalingStateHaveRemoteOffer:
      return webrtc::PeerConnectionInterface::kHaveRemoteOffer;
    case RTCSignalingStateHaveRemotePrAnswer:
      return webrtc::PeerConnectionInterface::kHaveRemotePrAnswer;
    case RTCSignalingStateClosed:
      return webrtc::PeerConnectionInterface::kClosed;
  }
}

// Native -> ObjC signaling state.
+ (RTCSignalingState)signalingStateForNativeState:
    (webrtc::PeerConnectionInterface::SignalingState)nativeState {
  switch (nativeState) {
    case webrtc::PeerConnectionInterface::kStable:
      return RTCSignalingStateStable;
    case webrtc::PeerConnectionInterface::kHaveLocalOffer:
      return RTCSignalingStateHaveLocalOffer;
    case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer:
      return RTCSignalingStateHaveLocalPrAnswer;
    case webrtc::PeerConnectionInterface::kHaveRemoteOffer:
      return RTCSignalingStateHaveRemoteOffer;
    case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer:
      return RTCSignalingStateHaveRemotePrAnswer;
    case webrtc::PeerConnectionInterface::kClosed:
      return RTCSignalingStateClosed;
  }
}

// Human-readable name for a signaling state.
+ (NSString *)stringForSignalingState:(RTCSignalingState)state {
  switch (state) {
    case RTCSignalingStateStable:
      return @"STABLE";
    case RTCSignalingStateHaveLocalOffer:
      return @"HAVE_LOCAL_OFFER";
    case RTCSignalingStateHaveLocalPrAnswer:
      return @"HAVE_LOCAL_PRANSWER";
    case RTCSignalingStateHaveRemoteOffer:
      return @"HAVE_REMOTE_OFFER";
    case RTCSignalingStateHaveRemotePrAnswer:
      return @"HAVE_REMOTE_PRANSWER";
    case RTCSignalingStateClosed:
      return @"CLOSED";
  }
}

// ObjC -> native ICE connection state. The ObjC-only Count sentinel maps to
// the native Max sentinel.
+ (webrtc::PeerConnectionInterface::IceConnectionState)
    nativeIceConnectionStateForState:(RTCIceConnectionState)state {
  switch (state) {
    case RTCIceConnectionStateNew:
      return webrtc::PeerConnectionInterface::kIceConnectionNew;
    case RTCIceConnectionStateChecking:
      return webrtc::PeerConnectionInterface::kIceConnectionChecking;
    case RTCIceConnectionStateConnected:
      return webrtc::PeerConnectionInterface::kIceConnectionConnected;
    case RTCIceConnectionStateCompleted:
      return webrtc::PeerConnectionInterface::kIceConnectionCompleted;
    case RTCIceConnectionStateFailed:
      return webrtc::PeerConnectionInterface::kIceConnectionFailed;
    case RTCIceConnectionStateDisconnected:
      return webrtc::PeerConnectionInterface::kIceConnectionDisconnected;
    case RTCIceConnectionStateClosed:
      return webrtc::PeerConnectionInterface::kIceConnectionClosed;
    case RTCIceConnectionStateCount:
      return webrtc::PeerConnectionInterface::kIceConnectionMax;
  }
}

// Native -> ObjC ICE connection state.
+ (RTCIceConnectionState)iceConnectionStateForNativeState:
    (webrtc::PeerConnectionInterface::IceConnectionState)nativeState {
  switch (nativeState) {
    case webrtc::PeerConnectionInterface::kIceConnectionNew:
      return RTCIceConnectionStateNew;
    case webrtc::PeerConnectionInterface::kIceConnectionChecking:
      return RTCIceConnectionStateChecking;
    case webrtc::PeerConnectionInterface::kIceConnectionConnected:
      return RTCIceConnectionStateConnected;
    case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
      return RTCIceConnectionStateCompleted;
    case webrtc::PeerConnectionInterface::kIceConnectionFailed:
      return RTCIceConnectionStateFailed;
    case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
      return RTCIceConnectionStateDisconnected;
    case webrtc::PeerConnectionInterface::kIceConnectionClosed:
      return RTCIceConnectionStateClosed;
    case webrtc::PeerConnectionInterface::kIceConnectionMax:
      return RTCIceConnectionStateCount;
  }
}

// Human-readable name for an ICE connection state.
+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state {
  switch (state) {
    case RTCIceConnectionStateNew:
      return @"NEW";
    case RTCIceConnectionStateChecking:
      return @"CHECKING";
    case RTCIceConnectionStateConnected:
      return @"CONNECTED";
    case RTCIceConnectionStateCompleted:
      return @"COMPLETED";
    case RTCIceConnectionStateFailed:
      return @"FAILED";
    case RTCIceConnectionStateDisconnected:
      return @"DISCONNECTED";
    case RTCIceConnectionStateClosed:
      return @"CLOSED";
    case RTCIceConnectionStateCount:
      return @"COUNT";
  }
}

// ObjC -> native ICE gathering state.
+ (webrtc::PeerConnectionInterface::IceGatheringState)
    nativeIceGatheringStateForState:(RTCIceGatheringState)state {
  switch (state) {
    case RTCIceGatheringStateNew:
      return webrtc::PeerConnectionInterface::kIceGatheringNew;
    case RTCIceGatheringStateGathering:
      return webrtc::PeerConnectionInterface::kIceGatheringGathering;
    case RTCIceGatheringStateComplete:
      return webrtc::PeerConnectionInterface::kIceGatheringComplete;
  }
}

// Native -> ObjC ICE gathering state.
+ (RTCIceGatheringState)iceGatheringStateForNativeState:
    (webrtc::PeerConnectionInterface::IceGatheringState)nativeState {
  switch (nativeState) {
    case webrtc::PeerConnectionInterface::kIceGatheringNew:
      return RTCIceGatheringStateNew;
    case webrtc::PeerConnectionInterface::kIceGatheringGathering:
      return RTCIceGatheringStateGathering;
    case webrtc::PeerConnectionInterface::kIceGatheringComplete:
      return RTCIceGatheringStateComplete;
  }
}

// Human-readable name for an ICE gathering state.
+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state {
  switch (state) {
    case RTCIceGatheringStateNew:
      return @"NEW";
    case RTCIceGatheringStateGathering:
      return @"GATHERING";
    case RTCIceGatheringStateComplete:
      return @"COMPLETE";
  }
}

// ObjC -> native stats output level.
+ (webrtc::PeerConnectionInterface::StatsOutputLevel)
    nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level {
  switch (level) {
    case RTCStatsOutputLevelStandard:
      return webrtc::PeerConnectionInterface::kStatsOutputLevelStandard;
    case RTCStatsOutputLevelDebug:
      return webrtc::PeerConnectionInterface::kStatsOutputLevelDebug;
  }
}

// Exposes the native peer connection for internal use by sibling wrappers.
- (rtc::scoped_refptr<webrtc::PeerConnectionInterface>)nativePeerConnection {
  return _peerConnection;
}

@end
|
||||
@ -0,0 +1,49 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCPeerConnectionFactory.h"
|
||||
|
||||
#include "webrtc/rtc_base/scoped_ref_ptr.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class AudioEncoderFactory;
|
||||
class AudioDecoderFactory;
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
namespace cricket {
|
||||
|
||||
class WebRtcVideoEncoderFactory;
|
||||
class WebRtcVideoDecoderFactory;
|
||||
|
||||
} // namespace cricket
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/**
|
||||
* This class extension exposes methods that work directly with injectable C++ components.
|
||||
*/
|
||||
@interface RTCPeerConnectionFactory ()

/**
 * Initializes the factory with injectable native audio/video
 * encoder/decoder factories.
 *
 * NOTE(review): the raw video-factory pointers appear to be adopted by the
 * native peer-connection factory (deleted by the callee) — confirm against
 * the implementation before holding on to them.
 */
- (instancetype)initWithNativeAudioEncoderFactory:
        (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
                        nativeAudioDecoderFactory:
                            (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
                        nativeVideoEncoderFactory:
                            (nullable cricket::WebRtcVideoEncoderFactory *)videoEncoderFactory
                        nativeVideoDecoderFactory:
                            (nullable cricket::WebRtcVideoDecoderFactory *)videoDecoderFactory
    NS_DESIGNATED_INITIALIZER;

@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,30 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCPeerConnectionFactory.h"
|
||||
|
||||
#include "webrtc/api/peerconnectioninterface.h"
|
||||
#include "webrtc/rtc_base/scoped_ref_ptr.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCPeerConnectionFactory ()

/**
 * PeerConnectionFactoryInterface created and held by this
 * RTCPeerConnectionFactory object. This is needed to pass to the underlying
 * C++ APIs.
 */
@property(nonatomic, readonly)
    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> nativeFactory;

@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,226 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCPeerConnectionFactory+Native.h"
|
||||
#import "RTCPeerConnectionFactory+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCAVFoundationVideoSource+Private.h"
|
||||
#import "RTCAudioSource+Private.h"
|
||||
#import "RTCAudioTrack+Private.h"
|
||||
#import "RTCMediaConstraints+Private.h"
|
||||
#import "RTCMediaStream+Private.h"
|
||||
#import "RTCPeerConnection+Private.h"
|
||||
#import "RTCVideoSource+Private.h"
|
||||
#import "RTCVideoTrack+Private.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoCodecFactory.h"
|
||||
#ifndef HAVE_NO_MEDIA
|
||||
#include "VideoToolbox/objc_video_decoder_factory.h"
|
||||
#include "VideoToolbox/objc_video_encoder_factory.h"
|
||||
#import "WebRTC/RTCVideoCodecH264.h"
|
||||
// The no-media version PeerConnectionFactory doesn't depend on these files, but the gn check tool
|
||||
// is not smart enough to take the #ifdef into account.
|
||||
#include "webrtc/api/audio_codecs/builtin_audio_decoder_factory.h" // nogncheck
|
||||
#include "webrtc/api/audio_codecs/builtin_audio_encoder_factory.h" // nogncheck
|
||||
#endif
|
||||
|
||||
#include "Video/objcvideotracksource.h"
|
||||
#include "webrtc/api/videosourceproxy.h"
|
||||
// Adding the nogncheck to disable the including header check.
|
||||
// The no-media version PeerConnectionFactory doesn't depend on media related
|
||||
// C++ target.
|
||||
// TODO(zhihuang): Remove nogncheck once MediaEngineInterface is moved to C++
|
||||
// API layer.
|
||||
#include "webrtc/media/engine/webrtcmediaengine.h" // nogncheck
|
||||
|
||||
@implementation RTCPeerConnectionFactory {
  // Dedicated native threads required by the WebRTC threading model;
  // started in the designated initializer.
  std::unique_ptr<rtc::Thread> _networkThread;
  std::unique_ptr<rtc::Thread> _workerThread;
  std::unique_ptr<rtc::Thread> _signalingThread;
  // YES while an AEC dump is running; guards against double-start.
  BOOL _hasStartedAecDump;
}

@synthesize nativeFactory = _nativeFactory;

// Default initializer: built-in audio codec factories plus the ObjC H264
// video codec factories. Under HAVE_NO_MEDIA all factories are nil.
- (instancetype)init {
#ifdef HAVE_NO_MEDIA
  return [self initWithNativeAudioEncoderFactory:nil
                       nativeAudioDecoderFactory:nil
                       nativeVideoEncoderFactory:nil
                       nativeVideoDecoderFactory:nil];
#else
  return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
                       nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
                       nativeVideoEncoderFactory:new webrtc::ObjCVideoEncoderFactory(
                                                     [[RTCVideoEncoderFactoryH264 alloc] init])
                       nativeVideoDecoderFactory:new webrtc::ObjCVideoDecoderFactory(
                                                     [[RTCVideoDecoderFactoryH264 alloc] init])];
#endif
}

// Wraps caller-supplied ObjC codec factories in their C++ adapters; nil
// factories fall back to the native defaults (nullptr).
- (instancetype)initWithEncoderFactory:(nullable id<RTCVideoEncoderFactory>)encoderFactory
                        decoderFactory:(nullable id<RTCVideoDecoderFactory>)decoderFactory {
#ifdef HAVE_NO_MEDIA
  return [self initWithNativeAudioEncoderFactory:nil
                       nativeAudioDecoderFactory:nil
                       nativeVideoEncoderFactory:nil
                       nativeVideoDecoderFactory:nil];
#else
  cricket::WebRtcVideoEncoderFactory *native_encoder_factory =
      encoderFactory ? new webrtc::ObjCVideoEncoderFactory(encoderFactory) : nullptr;
  cricket::WebRtcVideoDecoderFactory *native_decoder_factory =
      decoderFactory ? new webrtc::ObjCVideoDecoderFactory(decoderFactory) : nullptr;
  return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
                       nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
                       nativeVideoEncoderFactory:native_encoder_factory
                       nativeVideoDecoderFactory:native_decoder_factory];
#endif
}

// Designated initializer: starts the three native threads, then creates the
// native factory on top of them.
- (instancetype)initWithNativeAudioEncoderFactory:
        (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
                        nativeAudioDecoderFactory:
                            (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
                        nativeVideoEncoderFactory:
                            (nullable cricket::WebRtcVideoEncoderFactory *)videoEncoderFactory
                        nativeVideoDecoderFactory:
                            (nullable cricket::WebRtcVideoDecoderFactory *)videoDecoderFactory {
  if (self = [super init]) {
    _networkThread = rtc::Thread::CreateWithSocketServer();
    BOOL result = _networkThread->Start();
    NSAssert(result, @"Failed to start network thread.");

    _workerThread = rtc::Thread::Create();
    result = _workerThread->Start();
    NSAssert(result, @"Failed to start worker thread.");

    _signalingThread = rtc::Thread::Create();
    result = _signalingThread->Start();
    NSAssert(result, @"Failed to start signaling thread.");
#ifdef HAVE_NO_MEDIA
    // Media-free build: modular factory with no media engine or codecs.
    _nativeFactory = webrtc::CreateModularPeerConnectionFactory(
        _networkThread.get(),
        _workerThread.get(),
        _signalingThread.get(),
        nullptr, // default_adm
        nullptr, // audio_encoder_factory
        nullptr, // audio_decoder_factory
        nullptr, // video_encoder_factory
        nullptr, // video_decoder_factory
        nullptr, // audio_mixer
        std::unique_ptr<cricket::MediaEngineInterface>(),
        std::unique_ptr<webrtc::CallFactoryInterface>(),
        std::unique_ptr<webrtc::RtcEventLogFactoryInterface>());
#else
    // Ownership of encoder/decoder factories is passed on to the
    // peerconnectionfactory, that handles deleting them.
    _nativeFactory = webrtc::CreatePeerConnectionFactory(_networkThread.get(),
                                                         _workerThread.get(),
                                                         _signalingThread.get(),
                                                         nullptr, // audio device module
                                                         audioEncoderFactory,
                                                         audioDecoderFactory,
                                                         videoEncoderFactory,
                                                         videoDecoderFactory);
#endif
    NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
  }
  return self;
}

// Creates an audio source; a nil constraints argument means no constraints.
- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints {
  std::unique_ptr<webrtc::MediaConstraints> nativeConstraints;
  if (constraints) {
    nativeConstraints = constraints.nativeConstraints;
  }
  rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
      _nativeFactory->CreateAudioSource(nativeConstraints.get());
  return [[RTCAudioSource alloc] initWithNativeAudioSource:source];
}

// Convenience: audio track backed by a fresh, unconstrained audio source.
- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId {
  RTCAudioSource *audioSource = [self audioSourceWithConstraints:nil];
  return [self audioTrackWithSource:audioSource trackId:trackId];
}

- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source
                                trackId:(NSString *)trackId {
  return [[RTCAudioTrack alloc] initWithFactory:self
                                         source:source
                                        trackId:trackId];
}

// AVFoundation-backed capture source; nil in media-free builds.
- (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints:
    (nullable RTCMediaConstraints *)constraints {
#ifdef HAVE_NO_MEDIA
  return nil;
#else
  return [[RTCAVFoundationVideoSource alloc] initWithFactory:self constraints:constraints];
#endif
}

// Creates a video source backed by an ObjC track source, wrapped in a proxy
// that dispatches calls onto the signaling/worker threads.
- (RTCVideoSource *)videoSource {
  rtc::scoped_refptr<webrtc::ObjcVideoTrackSource> objcVideoTrackSource(
      new rtc::RefCountedObject<webrtc::ObjcVideoTrackSource>());
  return [[RTCVideoSource alloc]
      initWithNativeVideoSource:webrtc::VideoTrackSourceProxy::Create(_signalingThread.get(),
                                                                      _workerThread.get(),
                                                                      objcVideoTrackSource)];
}

- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source
                                trackId:(NSString *)trackId {
  return [[RTCVideoTrack alloc] initWithFactory:self
                                         source:source
                                        trackId:trackId];
}

- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId {
  return [[RTCMediaStream alloc] initWithFactory:self
                                        streamId:streamId];
}

- (RTCPeerConnection *)peerConnectionWithConfiguration:
    (RTCConfiguration *)configuration
                                           constraints:
    (RTCMediaConstraints *)constraints
                                              delegate:
    (nullable id<RTCPeerConnectionDelegate>)delegate {
  return [[RTCPeerConnection alloc] initWithFactory:self
                                      configuration:configuration
                                        constraints:constraints
                                           delegate:delegate];
}

// Starts an AEC (acoustic echo canceller) dump into filePath; the opened fd
// is handed to the native layer. Returns NO when a dump is already running
// or the file cannot be opened.
- (BOOL)startAecDumpWithFilePath:(NSString *)filePath
                  maxSizeInBytes:(int64_t)maxSizeInBytes {
  RTC_DCHECK(filePath.length);
  RTC_DCHECK_GT(maxSizeInBytes, 0);

  if (_hasStartedAecDump) {
    RTCLogError(@"Aec dump already started.");
    return NO;
  }
  int fd = open(filePath.UTF8String, O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR);
  if (fd < 0) {
    RTCLogError(@"Error opening file: %@. Error: %d", filePath, errno);
    return NO;
  }
  _hasStartedAecDump = _nativeFactory->StartAecDump(fd, maxSizeInBytes);
  return _hasStartedAecDump;
}

- (void)stopAecDump {
  _nativeFactory->StopAecDump();
  _hasStartedAecDump = NO;
}

@end
|
||||
@ -0,0 +1,28 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCRtpCodecParameters.h"
|
||||
|
||||
#include "webrtc/api/rtpparameters.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCRtpCodecParameters ()

/** Returns the equivalent native RtpCodecParameters structure. */
@property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters;

/** Initialize the object with a native RtpCodecParameters structure. */
- (instancetype)initWithNativeParameters:
    (const webrtc::RtpCodecParameters &)nativeParameters;

@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,97 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCRtpCodecParameters+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "WebRTC/RTCMediaStreamTrack.h" // For "kind" strings.
|
||||
|
||||
#include "webrtc/media/base/mediaconstants.h"
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
// ObjC-visible codec name constants, bridged from the corresponding cricket
// C-string constants so both layers always agree on spelling.
// NOTE(review): `const NSString *` (pointer-to-const) is an ObjC
// anti-pattern — the idiomatic form is `NSString * const`. Left unchanged
// here because the matching extern declarations live in a header outside
// this file and must stay in sync.
const NSString * const kRTCRtxCodecName = @(cricket::kRtxCodecName);
const NSString * const kRTCRedCodecName = @(cricket::kRedCodecName);
const NSString * const kRTCUlpfecCodecName = @(cricket::kUlpfecCodecName);
const NSString * const kRTCFlexfecCodecName = @(cricket::kFlexfecCodecName);
const NSString * const kRTCOpusCodecName = @(cricket::kOpusCodecName);
const NSString * const kRTCIsacCodecName = @(cricket::kIsacCodecName);
const NSString * const kRTCL16CodecName = @(cricket::kL16CodecName);
const NSString * const kRTCG722CodecName = @(cricket::kG722CodecName);
const NSString * const kRTCIlbcCodecName = @(cricket::kIlbcCodecName);
const NSString * const kRTCPcmuCodecName = @(cricket::kPcmuCodecName);
const NSString * const kRTCPcmaCodecName = @(cricket::kPcmaCodecName);
const NSString * const kRTCDtmfCodecName = @(cricket::kDtmfCodecName);
const NSString * const kRTCComfortNoiseCodecName =
    @(cricket::kComfortNoiseCodecName);
const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName);
const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName);
const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName);
||||
|
||||
@implementation RTCRtpCodecParameters

@synthesize payloadType = _payloadType;
@synthesize name = _name;
@synthesize kind = _kind;
@synthesize clockRate = _clockRate;
@synthesize numChannels = _numChannels;

// Plain init yields an empty parameters object; fields are normally filled
// via -initWithNativeParameters:.
- (instancetype)init {
  return [super init];
}

// Copies the fields of a native RtpCodecParameters into this wrapper.
// clock_rate/num_channels are optional on the native side and remain nil
// here when unset.
- (instancetype)initWithNativeParameters:
    (const webrtc::RtpCodecParameters &)nativeParameters {
  if (self = [self init]) {
    _payloadType = nativeParameters.payload_type;
    _name = [NSString stringForStdString:nativeParameters.name];
    switch (nativeParameters.kind) {
      case cricket::MEDIA_TYPE_AUDIO:
        _kind = kRTCMediaStreamTrackKindAudio;
        break;
      case cricket::MEDIA_TYPE_VIDEO:
        _kind = kRTCMediaStreamTrackKindVideo;
        break;
      case cricket::MEDIA_TYPE_DATA:
        // Data media has no codec parameters at this layer.
        RTC_NOTREACHED();
        break;
    }
    if (nativeParameters.clock_rate) {
      _clockRate = [NSNumber numberWithInt:*nativeParameters.clock_rate];
    }
    if (nativeParameters.num_channels) {
      _numChannels = [NSNumber numberWithInt:*nativeParameters.num_channels];
    }
  }
  return self;
}

// Converts back to the native representation (inverse of the initializer).
- (webrtc::RtpCodecParameters)nativeParameters {
  webrtc::RtpCodecParameters parameters;
  parameters.payload_type = _payloadType;
  parameters.name = [NSString stdStringForString:_name];
  // NSString pointer comparison is safe here since "kind" is readonly and only
  // populated above.
  if (_kind == kRTCMediaStreamTrackKindAudio) {
    parameters.kind = cricket::MEDIA_TYPE_AUDIO;
  } else if (_kind == kRTCMediaStreamTrackKindVideo) {
    parameters.kind = cricket::MEDIA_TYPE_VIDEO;
  } else {
    RTC_NOTREACHED();
  }
  if (_clockRate != nil) {
    parameters.clock_rate = rtc::Optional<int>(_clockRate.intValue);
  }
  if (_numChannels != nil) {
    parameters.num_channels = rtc::Optional<int>(_numChannels.intValue);
  }
  return parameters;
}

@end
|
||||
@ -0,0 +1,28 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCRtpEncodingParameters.h"
|
||||
|
||||
#include "webrtc/api/rtpparameters.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

/**
 * Private category exposing conversions between RTCRtpEncodingParameters and
 * the native webrtc::RtpEncodingParameters struct.
 */
@interface RTCRtpEncodingParameters ()

/** Returns the equivalent native RtpEncodingParameters structure. */
@property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters;

/** Initialize the object with a native RtpEncodingParameters structure. */
- (instancetype)initWithNativeParameters:
    (const webrtc::RtpEncodingParameters &)nativeParameters;

@end

NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,50 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCRtpEncodingParameters+Private.h"
|
||||
|
||||
@implementation RTCRtpEncodingParameters

@synthesize isActive = _isActive;
@synthesize maxBitrateBps = _maxBitrateBps;
@synthesize ssrc = _ssrc;

- (instancetype)init {
  return [super init];
}

// Wraps a native webrtc::RtpEncodingParameters struct. Optional native fields
// that are unset are mapped to nil properties.
- (instancetype)initWithNativeParameters:
    (const webrtc::RtpEncodingParameters &)nativeParameters {
  self = [self init];
  if (self) {
    _isActive = nativeParameters.active;
    if (nativeParameters.max_bitrate_bps) {
      _maxBitrateBps =
          [NSNumber numberWithInt:*nativeParameters.max_bitrate_bps];
    }
    if (nativeParameters.ssrc) {
      _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc];
    }
  }
  return self;
}

// Converts back to the native struct; nil properties leave the corresponding
// rtc::Optional fields unset.
- (webrtc::RtpEncodingParameters)nativeParameters {
  webrtc::RtpEncodingParameters encodingParameters;
  encodingParameters.active = _isActive;
  if (_maxBitrateBps != nil) {
    encodingParameters.max_bitrate_bps =
        rtc::Optional<int>(_maxBitrateBps.intValue);
  }
  if (_ssrc != nil) {
    encodingParameters.ssrc = rtc::Optional<uint32_t>(_ssrc.unsignedLongValue);
  }
  return encodingParameters;
}

@end
|
||||
@ -0,0 +1,62 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoCodec.h"
|
||||
|
||||
#include "webrtc/modules/include/module_common_types.h"
|
||||
|
||||
@implementation RTCRtpFragmentationHeader

@synthesize fragmentationOffset = _fragmentationOffset;
@synthesize fragmentationLength = _fragmentationLength;
@synthesize fragmentationTimeDiff = _fragmentationTimeDiff;
@synthesize fragmentationPlType = _fragmentationPlType;

// Copies the per-fragment arrays out of a native fragmentation header into
// parallel NSArray properties. A null |fragmentationHeader| yields an object
// with nil arrays.
- (instancetype)initWithNativeFragmentationHeader:
    (const webrtc::RTPFragmentationHeader *)fragmentationHeader {
  if (self = [super init]) {
    if (fragmentationHeader) {
      int count = fragmentationHeader->fragmentationVectorSize;
      NSMutableArray *offsets = [NSMutableArray array];
      NSMutableArray *lengths = [NSMutableArray array];
      NSMutableArray *timeDiffs = [NSMutableArray array];
      NSMutableArray *plTypes = [NSMutableArray array];
      for (int i = 0; i < count; ++i) {
        [offsets addObject:@(fragmentationHeader->fragmentationOffset[i])];
        [lengths addObject:@(fragmentationHeader->fragmentationLength[i])];
        [timeDiffs addObject:@(fragmentationHeader->fragmentationTimeDiff[i])];
        [plTypes addObject:@(fragmentationHeader->fragmentationPlType[i])];
      }
      _fragmentationOffset = [offsets copy];
      _fragmentationLength = [lengths copy];
      _fragmentationTimeDiff = [timeDiffs copy];
      _fragmentationPlType = [plTypes copy];
    }
  }

  return self;
}

// Builds a native fragmentation header from the stored arrays. The number of
// fragments is taken from _fragmentationOffset.count; the four arrays are
// expected to be the same length (they are populated in lockstep above).
- (std::unique_ptr<webrtc::RTPFragmentationHeader>)createNativeFragmentationHeader {
  auto fragmentationHeader =
      std::unique_ptr<webrtc::RTPFragmentationHeader>(new webrtc::RTPFragmentationHeader);
  fragmentationHeader->VerifyAndAllocateFragmentationHeader(_fragmentationOffset.count);
  for (NSUInteger i = 0; i < _fragmentationOffset.count; ++i) {
    fragmentationHeader->fragmentationOffset[i] = (size_t)_fragmentationOffset[i].unsignedIntValue;
    fragmentationHeader->fragmentationLength[i] = (size_t)_fragmentationLength[i].unsignedIntValue;
    // BUG FIX: these two fields previously read from _fragmentationOffset,
    // copying offsets into the native time-diff and payload-type arrays.
    fragmentationHeader->fragmentationTimeDiff[i] =
        (uint16_t)_fragmentationTimeDiff[i].unsignedIntValue;
    fragmentationHeader->fragmentationPlType[i] =
        (uint8_t)_fragmentationPlType[i].unsignedIntValue;
  }

  return fragmentationHeader;
}

@end
|
||||
@ -0,0 +1,28 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCRtpParameters.h"
|
||||
|
||||
#include "webrtc/api/rtpparameters.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

/**
 * Private category exposing conversions between RTCRtpParameters and the
 * native webrtc::RtpParameters struct.
 */
@interface RTCRtpParameters ()

/** Returns the equivalent native RtpParameters structure. */
@property(nonatomic, readonly) webrtc::RtpParameters nativeParameters;

/** Initialize the object with a native RtpParameters structure. */
- (instancetype)initWithNativeParameters:
    (const webrtc::RtpParameters &)nativeParameters;

@end

NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,56 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCRtpParameters+Private.h"
|
||||
|
||||
#import "RTCRtpCodecParameters+Private.h"
|
||||
#import "RTCRtpEncodingParameters+Private.h"
|
||||
|
||||
@implementation RTCRtpParameters

@synthesize encodings = _encodings;
@synthesize codecs = _codecs;

- (instancetype)init {
  return [super init];
}

// Wraps each native encoding and codec entry in its Obj-C counterpart.
- (instancetype)initWithNativeParameters:
    (const webrtc::RtpParameters &)nativeParameters {
  self = [self init];
  if (self) {
    NSMutableArray *encodings = [[NSMutableArray alloc] init];
    for (const auto &encoding : nativeParameters.encodings) {
      [encodings addObject:[[RTCRtpEncodingParameters alloc]
                               initWithNativeParameters:encoding]];
    }
    _encodings = encodings;

    NSMutableArray *codecs = [[NSMutableArray alloc] init];
    for (const auto &codec : nativeParameters.codecs) {
      [codecs addObject:[[RTCRtpCodecParameters alloc]
                            initWithNativeParameters:codec]];
    }
    _codecs = codecs;
  }
  return self;
}

// Converts the stored Obj-C wrappers back into a native RtpParameters struct.
- (webrtc::RtpParameters)nativeParameters {
  webrtc::RtpParameters rtpParameters;
  for (RTCRtpEncodingParameters *encoding in _encodings) {
    rtpParameters.encodings.push_back(encoding.nativeParameters);
  }
  for (RTCRtpCodecParameters *codec in _codecs) {
    rtpParameters.codecs.push_back(codec.nativeParameters);
  }
  return rtpParameters;
}

@end
|
||||
@ -0,0 +1,45 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCRtpReceiver.h"
|
||||
|
||||
#include "webrtc/api/rtpreceiverinterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

namespace webrtc {

// Bridges native RtpReceiverObserverInterface callbacks into calls on the
// owning RTCRtpReceiver's Obj-C delegate.
class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface {
 public:
  RtpReceiverDelegateAdapter(RTCRtpReceiver* receiver);

  void OnFirstPacketReceived(cricket::MediaType media_type) override;

 private:
  // Weak back-reference to avoid a retain cycle; the receiver owns this
  // adapter.
  __weak RTCRtpReceiver* receiver_;
};

}  // namespace webrtc

@interface RTCRtpReceiver ()

/** The native receiver this object wraps. */
@property(nonatomic, readonly)
    rtc::scoped_refptr<webrtc::RtpReceiverInterface> nativeRtpReceiver;

/** Initialize an RTCRtpReceiver with a native RtpReceiverInterface. */
- (instancetype)initWithNativeRtpReceiver:
    (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver
    NS_DESIGNATED_INITIALIZER;

/** Maps a native cricket::MediaType onto the Obj-C RTCRtpMediaType enum. */
+ (RTCRtpMediaType)mediaTypeForNativeMediaType:(cricket::MediaType)nativeMediaType;

@end

NS_ASSUME_NONNULL_END
|
||||
123
sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm
Normal file
123
sdk/objc/Framework/Classes/PeerConnection/RTCRtpReceiver.mm
Normal file
@ -0,0 +1,123 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCRtpReceiver+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCMediaStreamTrack+Private.h"
|
||||
#import "RTCRtpParameters+Private.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
#include "webrtc/api/mediastreaminterface.h"
|
||||
|
||||
namespace webrtc {

// Stores a weak back-reference to the Obj-C receiver; a non-null receiver is
// required (RTC_CHECK).
RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter(
    RTCRtpReceiver *receiver) {
  RTC_CHECK(receiver);
  receiver_ = receiver;
}

// Forwards the first-packet notification to the receiver's delegate, after
// mapping the native media type to its Obj-C enum. If the receiver has been
// deallocated, receiver_ is nil and the delegate call is a no-op.
void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
    cricket::MediaType media_type) {
  RTCRtpMediaType packet_media_type =
      [RTCRtpReceiver mediaTypeForNativeMediaType:media_type];
  RTCRtpReceiver *receiver = receiver_;
  [receiver.delegate rtpReceiver:receiver didReceiveFirstPacketForMediaType:packet_media_type];
}

}  // namespace webrtc
|
||||
|
||||
@implementation RTCRtpReceiver {
  rtc::scoped_refptr<webrtc::RtpReceiverInterface> _nativeRtpReceiver;
  std::unique_ptr<webrtc::RtpReceiverDelegateAdapter> _observer;
}

@synthesize delegate = _delegate;

// The id of the underlying native receiver.
- (NSString *)receiverId {
  return [NSString stringForStdString:_nativeRtpReceiver->id()];
}

// Snapshot of the native receiver's current RTP parameters.
- (RTCRtpParameters *)parameters {
  return [[RTCRtpParameters alloc]
      initWithNativeParameters:_nativeRtpReceiver->GetParameters()];
}

// Applies |parameters| to the native receiver; failures are logged, not
// raised.
- (void)setParameters:(RTCRtpParameters *)parameters {
  if (!_nativeRtpReceiver->SetParameters(parameters.nativeParameters)) {
    RTCLogError(@"RTCRtpReceiver(%p): Failed to set parameters: %@", self,
                parameters);
  }
}

// Wraps the native track, or returns nil if the receiver has none.
- (RTCMediaStreamTrack *)track {
  rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
      _nativeRtpReceiver->track());
  if (nativeTrack) {
    return [[RTCMediaStreamTrack alloc] initWithNativeTrack:nativeTrack];
  }
  return nil;
}

- (NSString *)description {
  return [NSString stringWithFormat:@"RTCRtpReceiver {\n receiverId: %@\n}",
                                    self.receiverId];
}

// BUG FIX: unregister the observer before it is destroyed. The native
// receiver is ref-counted and may outlive this wrapper; without this, it
// would keep a dangling pointer to the destroyed _observer adapter.
- (void)dealloc {
  if (_nativeRtpReceiver) {
    _nativeRtpReceiver->SetObserver(nullptr);
  }
}

// Two wrappers are equal iff they wrap the same native receiver.
- (BOOL)isEqual:(id)object {
  if (self == object) {
    return YES;
  }
  if (object == nil) {
    return NO;
  }
  if (![object isMemberOfClass:[self class]]) {
    return NO;
  }
  RTCRtpReceiver *receiver = (RTCRtpReceiver *)object;
  return _nativeRtpReceiver == receiver.nativeRtpReceiver;
}

- (NSUInteger)hash {
  return (NSUInteger)_nativeRtpReceiver.get();
}

#pragma mark - Private

- (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
  return _nativeRtpReceiver;
}

// Designated initializer: wraps the native receiver and registers a delegate
// adapter as its observer (unregistered again in dealloc).
- (instancetype)initWithNativeRtpReceiver:
    (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
  if (self = [super init]) {
    _nativeRtpReceiver = nativeRtpReceiver;
    RTCLogInfo(
        @"RTCRtpReceiver(%p): created receiver: %@", self, self.description);
    _observer.reset(new webrtc::RtpReceiverDelegateAdapter(self));
    _nativeRtpReceiver->SetObserver(_observer.get());
  }
  return self;
}

// Exhaustive mapping from the native media type enum; no default case so the
// compiler warns if cricket::MediaType gains a value.
+ (RTCRtpMediaType)mediaTypeForNativeMediaType:
    (cricket::MediaType)nativeMediaType {
  switch (nativeMediaType) {
    case cricket::MEDIA_TYPE_AUDIO:
      return RTCRtpMediaTypeAudio;
    case cricket::MEDIA_TYPE_VIDEO:
      return RTCRtpMediaTypeVideo;
    case cricket::MEDIA_TYPE_DATA:
      return RTCRtpMediaTypeData;
  }
}

@end
|
||||
@ -0,0 +1,29 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCRtpSender.h"
|
||||
|
||||
#include "webrtc/api/rtpsenderinterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

/**
 * Private category exposing the native sender backing an RTCRtpSender.
 */
@interface RTCRtpSender ()

/** The native sender this object wraps. */
@property(nonatomic, readonly)
    rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeRtpSender;

/** Initialize an RTCRtpSender with a native RtpSenderInterface. */
- (instancetype)initWithNativeRtpSender:
    (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender
    NS_DESIGNATED_INITIALIZER;

@end

NS_ASSUME_NONNULL_END
|
||||
94
sdk/objc/Framework/Classes/PeerConnection/RTCRtpSender.mm
Normal file
94
sdk/objc/Framework/Classes/PeerConnection/RTCRtpSender.mm
Normal file
@ -0,0 +1,94 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCRtpSender+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCMediaStreamTrack+Private.h"
|
||||
#import "RTCRtpParameters+Private.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
#include "webrtc/api/mediastreaminterface.h"
|
||||
|
||||
@implementation RTCRtpSender {
  rtc::scoped_refptr<webrtc::RtpSenderInterface> _nativeRtpSender;
}

// The id of the underlying native sender.
- (NSString *)senderId {
  return [NSString stringForStdString:_nativeRtpSender->id()];
}

// Snapshot of the native sender's current RTP parameters.
- (RTCRtpParameters *)parameters {
  return [[RTCRtpParameters alloc]
      initWithNativeParameters:_nativeRtpSender->GetParameters()];
}

// Applies |parameters| to the native sender; failures are logged, not raised.
- (void)setParameters:(RTCRtpParameters *)parameters {
  if (!_nativeRtpSender->SetParameters(parameters.nativeParameters)) {
    RTCLogError(@"RTCRtpSender(%p): Failed to set parameters: %@", self,
                parameters);
  }
}

// Wraps the native track, or returns nil if the sender has none.
- (RTCMediaStreamTrack *)track {
  rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
      _nativeRtpSender->track());
  if (!nativeTrack) {
    return nil;
  }
  return [[RTCMediaStreamTrack alloc] initWithNativeTrack:nativeTrack];
}

// Replaces the sender's track; failures are logged, not raised.
- (void)setTrack:(RTCMediaStreamTrack *)track {
  if (!_nativeRtpSender->SetTrack(track.nativeTrack)) {
    RTCLogError(@"RTCRtpSender(%p): Failed to set track %@", self, track);
  }
}

- (NSString *)description {
  return [NSString stringWithFormat:@"RTCRtpSender {\n senderId: %@\n}",
                                    self.senderId];
}

// Two wrappers are equal iff they wrap the same native sender.
- (BOOL)isEqual:(id)object {
  if (self == object) {
    return YES;
  }
  if (object == nil) {
    return NO;
  }
  if (![object isMemberOfClass:[self class]]) {
    return NO;
  }
  RTCRtpSender *sender = (RTCRtpSender *)object;
  return _nativeRtpSender == sender.nativeRtpSender;
}

- (NSUInteger)hash {
  return (NSUInteger)_nativeRtpSender.get();
}

#pragma mark - Private

- (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
  return _nativeRtpSender;
}

// Designated initializer; the native sender must be non-null.
- (instancetype)initWithNativeRtpSender:
    (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
  NSParameterAssert(nativeRtpSender);
  self = [super init];
  if (self) {
    _nativeRtpSender = nativeRtpSender;
    RTCLogInfo(@"RTCRtpSender(%p): created sender: %@", self, self.description);
  }
  return self;
}

@end
|
||||
26
sdk/objc/Framework/Classes/PeerConnection/RTCSSLAdapter.mm
Normal file
26
sdk/objc/Framework/Classes/PeerConnection/RTCSSLAdapter.mm
Normal file
@ -0,0 +1,26 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCSSLAdapter.h"
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
#include "webrtc/rtc_base/ssladapter.h"
|
||||
|
||||
// Initializes WebRTC's SSL stack. DCHECKs (debug builds) and returns NO on
// failure.
BOOL RTCInitializeSSL() {
  BOOL didInitialize = rtc::InitializeSSL();
  RTC_DCHECK(didInitialize);
  return didInitialize;
}
|
||||
|
||||
// Tears down the SSL stack set up by RTCInitializeSSL(). DCHECKs (debug
// builds) and returns NO on failure.
BOOL RTCCleanupSSL() {
  BOOL didCleanUp = rtc::CleanupSSL();
  RTC_DCHECK(didCleanUp);
  return didCleanUp;
}
|
||||
@ -0,0 +1,41 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCSessionDescription.h"
|
||||
|
||||
#include "webrtc/api/jsep.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

@interface RTCSessionDescription ()

/**
 * The native SessionDescriptionInterface representation of this
 * RTCSessionDescription object. This is needed to pass to the underlying C++
 * APIs.
 */
@property(nonatomic, readonly)
    webrtc::SessionDescriptionInterface *nativeDescription;

/**
 * Initialize an RTCSessionDescription from a native
 * SessionDescriptionInterface. No ownership is taken of the native session
 * description.
 */
- (instancetype)initWithNativeDescription:
    (const webrtc::SessionDescriptionInterface *)nativeDescription;

/** Converts an RTCSdpType value to the canonical SDP type string. */
+ (std::string)stdStringForType:(RTCSdpType)type;

/** Converts an SDP type string back to its RTCSdpType value. */
+ (RTCSdpType)typeForStdString:(const std::string &)string;

@end

NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,102 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCSessionDescription+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
|
||||
@implementation RTCSessionDescription

@synthesize type = _type;
@synthesize sdp = _sdp;

// Converts an RTCSdpType value to its canonical SDP type NSString.
+ (NSString *)stringForType:(RTCSdpType)type {
  return [NSString stringForStdString:[[self class] stdStringForType:type]];
}

// Converts an SDP type NSString back to its RTCSdpType value.
+ (RTCSdpType)typeForString:(NSString *)string {
  return [[self class] typeForStdString:string.stdString];
}

// Designated public initializer; |sdp| must be non-empty and is copied.
- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp {
  NSParameterAssert(sdp.length);
  self = [super init];
  if (self) {
    _type = type;
    _sdp = [sdp copy];
  }
  return self;
}

- (NSString *)description {
  return [NSString stringWithFormat:@"RTCSessionDescription:\n%@\n%@",
                                    [[self class] stringForType:_type],
                                    _sdp];
}

#pragma mark - Private

// Parses the stored SDP into a native SessionDescriptionInterface. Returns
// null after logging if parsing fails; the caller takes ownership.
- (webrtc::SessionDescriptionInterface *)nativeDescription {
  webrtc::SdpParseError error;

  webrtc::SessionDescriptionInterface *description =
      webrtc::CreateSessionDescription([[self class] stdStringForType:_type],
                                       _sdp.stdString,
                                       &error);

  if (!description) {
    RTCLogError(@"Failed to create session description: %s\nline: %s",
                error.description.c_str(),
                error.line.c_str());
  }

  return description;
}

// Builds an RTCSessionDescription from a native description; the native
// object is only read, never retained.
- (instancetype)initWithNativeDescription:
    (const webrtc::SessionDescriptionInterface *)nativeDescription {
  NSParameterAssert(nativeDescription);
  std::string sdp;
  nativeDescription->ToString(&sdp);
  RTCSdpType type = [[self class] typeForStdString:nativeDescription->type()];

  return [self initWithType:type sdp:[NSString stringForStdString:sdp]];
}

+ (std::string)stdStringForType:(RTCSdpType)type {
  switch (type) {
    case RTCSdpTypeOffer:
      return webrtc::SessionDescriptionInterface::kOffer;
    case RTCSdpTypePrAnswer:
      return webrtc::SessionDescriptionInterface::kPrAnswer;
    case RTCSdpTypeAnswer:
      return webrtc::SessionDescriptionInterface::kAnswer;
  }
}

+ (RTCSdpType)typeForStdString:(const std::string &)string {
  if (string == webrtc::SessionDescriptionInterface::kOffer) {
    return RTCSdpTypeOffer;
  } else if (string == webrtc::SessionDescriptionInterface::kPrAnswer) {
    return RTCSdpTypePrAnswer;
  } else if (string == webrtc::SessionDescriptionInterface::kAnswer) {
    return RTCSdpTypeAnswer;
  } else {
    // Unrecognized type strings are a programmer error; fall back to offer.
    RTC_NOTREACHED();
    return RTCSdpTypeOffer;
  }
}

@end
|
||||
29
sdk/objc/Framework/Classes/PeerConnection/RTCTracing.mm
Normal file
29
sdk/objc/Framework/Classes/PeerConnection/RTCTracing.mm
Normal file
@ -0,0 +1,29 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCTracing.h"
|
||||
|
||||
#include "webrtc/rtc_base/event_tracer.h"
|
||||
|
||||
// Starts WebRTC's in-process event tracer. Pair with
// RTCShutdownInternalTracer().
void RTCSetupInternalTracer() {
  rtc::tracing::SetupInternalTracer();
}

// Begins writing trace events to |filePath|. Returns the result of the
// native StartInternalCapture call.
BOOL RTCStartInternalCapture(NSString *filePath) {
  return rtc::tracing::StartInternalCapture(filePath.UTF8String);
}

// Stops a capture started with RTCStartInternalCapture().
void RTCStopInternalCapture() {
  rtc::tracing::StopInternalCapture();
}

// Tears down the tracer started by RTCSetupInternalTracer().
void RTCShutdownInternalTracer() {
  rtc::tracing::ShutdownInternalTracer();
}
||||
25
sdk/objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m
Normal file
25
sdk/objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m
Normal file
@ -0,0 +1,25 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoCapturer.h"
|
||||
|
||||
@implementation RTCVideoCapturer

@synthesize delegate = _delegate;

// Designated initializer; |delegate| receives captured frames and must be
// non-nil.
- (instancetype)initWithDelegate:(id<RTCVideoCapturerDelegate>)delegate {
  NSAssert(delegate != nil, @"delegate cannot be nil");
  self = [super init];
  if (self) {
    _delegate = delegate;
  }
  return self;
}

@end
|
||||
@ -0,0 +1,59 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoCodec.h"
|
||||
|
||||
#import "WebRTC/RTCVideoCodecH264.h"
|
||||
|
||||
#include "webrtc/common_video/include/video_frame.h"
|
||||
#include "webrtc/media/base/codec.h"
|
||||
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

/* Interfaces for converting to/from internal C++ formats. */

@interface RTCEncodedImage ()

/** Initialize from a native webrtc::EncodedImage. */
- (instancetype)initWithNativeEncodedImage:(webrtc::EncodedImage)encodedImage;
/** Returns the equivalent native webrtc::EncodedImage. */
- (webrtc::EncodedImage)nativeEncodedImage;

@end

@interface RTCVideoEncoderSettings ()

/** Initialize from a native codec config; |videoCodec| may be null. */
- (instancetype)initWithNativeVideoCodec:(const webrtc::VideoCodec *__nullable)videoCodec;

@end

@interface RTCCodecSpecificInfoH264 ()

/** Returns the equivalent native webrtc::CodecSpecificInfo. */
- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo;

@end

@interface RTCRtpFragmentationHeader ()

/** Initialize from a native header; |fragmentationHeader| may be null. */
- (instancetype)initWithNativeFragmentationHeader:
        (const webrtc::RTPFragmentationHeader *__nullable)fragmentationHeader;
/** Builds a newly-allocated native header from the stored arrays. */
- (std::unique_ptr<webrtc::RTPFragmentationHeader>)createNativeFragmentationHeader;

@end

@interface RTCVideoCodecInfo ()

/** Initialize from a native cricket::VideoCodec (id, name, params). */
- (instancetype)initWithNativeVideoCodec:(cricket::VideoCodec)videoCodec;
- (instancetype)initWithPayload:(NSInteger)payload
                           name:(NSString *)name
                     parameters:(NSDictionary<NSString *, NSString *> *)parameters;
/** Returns the equivalent native cricket::VideoCodec. */
- (cricket::VideoCodec)nativeVideoCodec;

@end

NS_ASSUME_NONNULL_END
|
||||
80
sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
Normal file
80
sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
Normal file
@ -0,0 +1,80 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoCodec.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCVideoCodec+Private.h"
|
||||
#import "WebRTC/RTCVideoCodecFactory.h"
|
||||
|
||||
@implementation RTCVideoCodecInfo
|
||||
|
||||
@synthesize payload = _payload;
|
||||
@synthesize name = _name;
|
||||
@synthesize parameters = _parameters;
|
||||
|
||||
- (instancetype)initWithName:(NSString *)name
|
||||
parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters {
|
||||
if (self = [super init]) {
|
||||
_payload = 0;
|
||||
_name = name;
|
||||
_parameters = (parameters ? parameters : @{});
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithNativeVideoCodec:(cricket::VideoCodec)videoCodec {
|
||||
NSMutableDictionary *params = [NSMutableDictionary dictionary];
|
||||
for (auto it = videoCodec.params.begin(); it != videoCodec.params.end(); ++it) {
|
||||
[params setObject:[NSString stringForStdString:it->second]
|
||||
forKey:[NSString stringForStdString:it->first]];
|
||||
}
|
||||
return [self initWithPayload:videoCodec.id
|
||||
name:[NSString stringForStdString:videoCodec.name]
|
||||
parameters:params];
|
||||
}
|
||||
|
||||
- (instancetype)initWithPayload:(NSInteger)payload
|
||||
name:(NSString *)name
|
||||
parameters:(NSDictionary<NSString *, NSString *> *)parameters {
|
||||
if (self = [self initWithName:name parameters:parameters]) {
|
||||
_payload = payload;
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
- (cricket::VideoCodec)nativeVideoCodec {
|
||||
cricket::VideoCodec codec([NSString stdStringForString:_name]);
|
||||
for (NSString *paramKey in _parameters.allKeys) {
|
||||
codec.SetParam([NSString stdStringForString:paramKey],
|
||||
[NSString stdStringForString:_parameters[paramKey]]);
|
||||
}
|
||||
|
||||
return codec;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@implementation RTCVideoEncoderQpThresholds
|
||||
|
||||
@synthesize low = _low;
|
||||
@synthesize high = _high;
|
||||
|
||||
- (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high {
|
||||
if (self = [super init]) {
|
||||
_low = low;
|
||||
_high = high;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,95 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoCodecH264.h"
|
||||
|
||||
#include <vector>
|
||||
|
||||
#import "RTCVideoCodec+Private.h"
|
||||
#import "WebRTC/RTCVideoCodec.h"
|
||||
|
||||
#include "webrtc/rtc_base/timeutils.h"
|
||||
#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
|
||||
#include "webrtc/system_wrappers/include/field_trial.h"
|
||||
|
||||
const char kHighProfileExperiment[] = "WebRTC-H264HighProfile";
|
||||
static NSString *kLevel31ConstrainedHigh = @"640c1f";
|
||||
static NSString *kLevel31ConstrainedBaseline = @"42e01f";
|
||||
|
||||
bool IsHighProfileEnabled() {
|
||||
return webrtc::field_trial::IsEnabled(kHighProfileExperiment);
|
||||
}
|
||||
|
||||
// H264 specific settings.
|
||||
@implementation RTCCodecSpecificInfoH264
|
||||
|
||||
@synthesize packetizationMode = _packetizationMode;
|
||||
|
||||
- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo {
|
||||
webrtc::CodecSpecificInfo codecSpecificInfo;
|
||||
codecSpecificInfo.codecType = webrtc::kVideoCodecH264;
|
||||
codecSpecificInfo.codec_name = "H264";
|
||||
codecSpecificInfo.codecSpecific.H264.packetization_mode =
|
||||
(webrtc::H264PacketizationMode)_packetizationMode;
|
||||
|
||||
return codecSpecificInfo;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
// Encoder factory.
|
||||
@implementation RTCVideoEncoderFactoryH264
|
||||
|
||||
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
|
||||
NSMutableArray<RTCVideoCodecInfo *> *codecs = [NSMutableArray array];
|
||||
NSString *codecName = [NSString stringWithUTF8String:cricket::kH264CodecName];
|
||||
|
||||
if (IsHighProfileEnabled()) {
|
||||
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
|
||||
@"profile-level-id" : kLevel31ConstrainedHigh,
|
||||
@"level-asymmetry-allowed" : @"1",
|
||||
@"packetization-mode" : @"1",
|
||||
};
|
||||
RTCVideoCodecInfo *constrainedHighInfo =
|
||||
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams];
|
||||
[codecs addObject:constrainedHighInfo];
|
||||
}
|
||||
|
||||
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
|
||||
@"profile-level-id" : kLevel31ConstrainedBaseline,
|
||||
@"level-asymmetry-allowed" : @"1",
|
||||
@"packetization-mode" : @"1",
|
||||
};
|
||||
RTCVideoCodecInfo *constrainedBaselineInfo =
|
||||
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams];
|
||||
[codecs addObject:constrainedBaselineInfo];
|
||||
|
||||
return [codecs copy];
|
||||
}
|
||||
|
||||
- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
|
||||
return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
// Decoder factory.
|
||||
@implementation RTCVideoDecoderFactoryH264
|
||||
|
||||
- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
|
||||
return [[RTCVideoDecoderH264 alloc] init];
|
||||
}
|
||||
|
||||
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
|
||||
NSString *codecName = [NSString stringWithUTF8String:cricket::kH264CodecName];
|
||||
return @[ [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:nil] ];
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,51 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoCodec.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCVideoCodec+Private.h"
|
||||
#import "WebRTC/RTCVideoCodecFactory.h"
|
||||
|
||||
@implementation RTCVideoEncoderSettings
|
||||
|
||||
@synthesize name = _name;
|
||||
@synthesize width = _width;
|
||||
@synthesize height = _height;
|
||||
@synthesize startBitrate = _startBitrate;
|
||||
@synthesize maxBitrate = _maxBitrate;
|
||||
@synthesize minBitrate = _minBitrate;
|
||||
@synthesize targetBitrate = _targetBitrate;
|
||||
@synthesize maxFramerate = _maxFramerate;
|
||||
@synthesize qpMax = _qpMax;
|
||||
@synthesize mode = _mode;
|
||||
|
||||
- (instancetype)initWithNativeVideoCodec:(const webrtc::VideoCodec *)videoCodec {
|
||||
if (self = [super init]) {
|
||||
if (videoCodec) {
|
||||
const char *codecName = CodecTypeToPayloadString(videoCodec->codecType);
|
||||
_name = [NSString stringWithUTF8String:codecName];
|
||||
|
||||
_width = videoCodec->width;
|
||||
_height = videoCodec->height;
|
||||
_startBitrate = videoCodec->startBitrate;
|
||||
_maxBitrate = videoCodec->maxBitrate;
|
||||
_minBitrate = videoCodec->minBitrate;
|
||||
_targetBitrate = videoCodec->targetBitrate;
|
||||
_maxFramerate = videoCodec->maxFramerate;
|
||||
_qpMax = videoCodec->qpMax;
|
||||
_mode = (RTCVideoCodecMode)videoCodec->mode;
|
||||
}
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
@end
|
||||
83
sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
Normal file
83
sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
Normal file
@ -0,0 +1,83 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
#import "WebRTC/RTCVideoFrameBuffer.h"
|
||||
|
||||
@implementation RTCVideoFrame {
|
||||
RTCVideoRotation _rotation;
|
||||
int64_t _timeStampNs;
|
||||
}
|
||||
|
||||
@synthesize buffer = _buffer;
|
||||
@synthesize timeStamp;
|
||||
|
||||
- (int)width {
|
||||
return _buffer.width;
|
||||
}
|
||||
|
||||
- (int)height {
|
||||
return _buffer.height;
|
||||
}
|
||||
|
||||
- (RTCVideoRotation)rotation {
|
||||
return _rotation;
|
||||
}
|
||||
|
||||
- (int64_t)timeStampNs {
|
||||
return _timeStampNs;
|
||||
}
|
||||
|
||||
- (RTCVideoFrame *)newI420VideoFrame {
|
||||
return [[RTCVideoFrame alloc] initWithBuffer:[_buffer toI420]
|
||||
rotation:_rotation
|
||||
timeStampNs:_timeStampNs];
|
||||
}
|
||||
|
||||
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
|
||||
rotation:(RTCVideoRotation)rotation
|
||||
timeStampNs:(int64_t)timeStampNs {
|
||||
return [self initWithBuffer:[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]
|
||||
rotation:rotation
|
||||
timeStampNs:timeStampNs];
|
||||
}
|
||||
|
||||
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
|
||||
scaledWidth:(int)scaledWidth
|
||||
scaledHeight:(int)scaledHeight
|
||||
cropWidth:(int)cropWidth
|
||||
cropHeight:(int)cropHeight
|
||||
cropX:(int)cropX
|
||||
cropY:(int)cropY
|
||||
rotation:(RTCVideoRotation)rotation
|
||||
timeStampNs:(int64_t)timeStampNs {
|
||||
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer
|
||||
adaptedWidth:scaledWidth
|
||||
adaptedHeight:scaledHeight
|
||||
cropWidth:cropWidth
|
||||
cropHeight:cropHeight
|
||||
cropX:cropX
|
||||
cropY:cropY];
|
||||
return [self initWithBuffer:rtcPixelBuffer rotation:rotation timeStampNs:timeStampNs];
|
||||
}
|
||||
|
||||
- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)buffer
|
||||
rotation:(RTCVideoRotation)rotation
|
||||
timeStampNs:(int64_t)timeStampNs {
|
||||
if (self = [super init]) {
|
||||
_buffer = buffer;
|
||||
_rotation = rotation;
|
||||
_timeStampNs = timeStampNs;
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,42 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCVideoRendererAdapter.h"
|
||||
|
||||
#import "WebRTC/RTCVideoRenderer.h"
|
||||
|
||||
#include "webrtc/api/mediastreaminterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCVideoRendererAdapter ()
|
||||
|
||||
/**
|
||||
* The Objective-C video renderer passed to this adapter during construction.
|
||||
* Calls made to the webrtc::VideoRenderInterface will be adapted and passed to
|
||||
* this video renderer.
|
||||
*/
|
||||
@property(nonatomic, readonly) id<RTCVideoRenderer> videoRenderer;
|
||||
|
||||
/**
|
||||
* The native VideoSinkInterface surface exposed by this adapter. Calls made
|
||||
* to this interface will be adapted and passed to the RTCVideoRenderer supplied
|
||||
* during construction. This pointer is unsafe and owned by this class.
|
||||
*/
|
||||
@property(nonatomic, readonly)
|
||||
rtc::VideoSinkInterface<webrtc::VideoFrame> *nativeVideoRenderer;
|
||||
|
||||
/** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. */
|
||||
- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer
|
||||
NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,27 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/*
|
||||
* Creates a rtc::VideoSinkInterface surface for an RTCVideoRenderer. The
|
||||
* rtc::VideoSinkInterface is used by WebRTC rendering code - this
|
||||
* adapter adapts calls made to that interface to the RTCVideoRenderer supplied
|
||||
* during construction.
|
||||
*/
|
||||
@interface RTCVideoRendererAdapter : NSObject
|
||||
|
||||
- (instancetype)init NS_UNAVAILABLE;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@ -0,0 +1,80 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCI420Buffer+Private.h"
|
||||
#import "RTCVideoRendererAdapter+Private.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
#import "WebRTC/RTCVideoFrameBuffer.h"
|
||||
#import "objc_frame_buffer.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class VideoRendererAdapter
|
||||
: public rtc::VideoSinkInterface<webrtc::VideoFrame> {
|
||||
public:
|
||||
VideoRendererAdapter(RTCVideoRendererAdapter* adapter) {
|
||||
adapter_ = adapter;
|
||||
size_ = CGSizeZero;
|
||||
}
|
||||
|
||||
void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
|
||||
rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer = nativeVideoFrame.video_frame_buffer();
|
||||
id<RTCVideoFrameBuffer> rtc_frame_buffer;
|
||||
if (video_frame_buffer->type() == VideoFrameBuffer::Type::kNative) {
|
||||
rtc::scoped_refptr<ObjCFrameBuffer> objc_frame_buffer(
|
||||
static_cast<ObjCFrameBuffer*>(video_frame_buffer.get()));
|
||||
rtc_frame_buffer = (id<RTCVideoFrameBuffer>)objc_frame_buffer->wrapped_frame_buffer();
|
||||
} else {
|
||||
rtc_frame_buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:video_frame_buffer->ToI420()];
|
||||
}
|
||||
RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
|
||||
initWithBuffer:rtc_frame_buffer
|
||||
rotation:static_cast<RTCVideoRotation>(nativeVideoFrame.rotation())
|
||||
timeStampNs:nativeVideoFrame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
|
||||
|
||||
CGSize current_size = (videoFrame.rotation % 180 == 0)
|
||||
? CGSizeMake(videoFrame.width, videoFrame.height)
|
||||
: CGSizeMake(videoFrame.height, videoFrame.width);
|
||||
|
||||
if (!CGSizeEqualToSize(size_, current_size)) {
|
||||
size_ = current_size;
|
||||
[adapter_.videoRenderer setSize:size_];
|
||||
}
|
||||
[adapter_.videoRenderer renderFrame:videoFrame];
|
||||
}
|
||||
|
||||
private:
|
||||
__weak RTCVideoRendererAdapter *adapter_;
|
||||
CGSize size_;
|
||||
};
|
||||
}
|
||||
|
||||
@implementation RTCVideoRendererAdapter {
|
||||
std::unique_ptr<webrtc::VideoRendererAdapter> _adapter;
|
||||
}
|
||||
|
||||
@synthesize videoRenderer = _videoRenderer;
|
||||
|
||||
- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer {
|
||||
NSParameterAssert(videoRenderer);
|
||||
if (self = [super init]) {
|
||||
_videoRenderer = videoRenderer;
|
||||
_adapter.reset(new webrtc::VideoRendererAdapter(self));
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (rtc::VideoSinkInterface<webrtc::VideoFrame> *)nativeVideoRenderer {
|
||||
return _adapter.get();
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,40 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoSource.h"
|
||||
|
||||
#import "RTCMediaSource+Private.h"
|
||||
|
||||
#include "webrtc/api/mediastreaminterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCVideoSource ()
|
||||
|
||||
/**
|
||||
* The VideoTrackSourceInterface object passed to this RTCVideoSource during
|
||||
* construction.
|
||||
*/
|
||||
@property(nonatomic, readonly)
|
||||
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>
|
||||
nativeVideoSource;
|
||||
|
||||
/** Initialize an RTCVideoSource from a native VideoTrackSourceInterface. */
|
||||
- (instancetype)initWithNativeVideoSource:
|
||||
(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource
|
||||
NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
- (instancetype)initWithNativeMediaSource:
|
||||
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
|
||||
type:(RTCMediaSourceType)type NS_UNAVAILABLE;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
67
sdk/objc/Framework/Classes/PeerConnection/RTCVideoSource.mm
Normal file
67
sdk/objc/Framework/Classes/PeerConnection/RTCVideoSource.mm
Normal file
@ -0,0 +1,67 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCVideoSource+Private.h"
|
||||
|
||||
#include "webrtc/api/videosourceproxy.h"
|
||||
#include "webrtc/rtc_base/checks.h"
|
||||
#include "webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.h"
|
||||
|
||||
static webrtc::ObjcVideoTrackSource *getObjcVideoSource(
|
||||
const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
|
||||
webrtc::VideoTrackSourceProxy *proxy_source =
|
||||
static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
|
||||
return static_cast<webrtc::ObjcVideoTrackSource *>(proxy_source->internal());
|
||||
}
|
||||
|
||||
// TODO(magjed): Refactor this class and target ObjcVideoTrackSource only once
|
||||
// RTCAVFoundationVideoSource is gone. See http://crbug/webrtc/7177 for more
|
||||
// info.
|
||||
@implementation RTCVideoSource {
|
||||
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
|
||||
}
|
||||
|
||||
- (instancetype)initWithNativeVideoSource:
|
||||
(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
|
||||
RTC_DCHECK(nativeVideoSource);
|
||||
if (self = [super initWithNativeMediaSource:nativeVideoSource
|
||||
type:RTCMediaSourceTypeVideo]) {
|
||||
_nativeVideoSource = nativeVideoSource;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithNativeMediaSource:
|
||||
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
|
||||
type:(RTCMediaSourceType)type {
|
||||
RTC_NOTREACHED();
|
||||
return nil;
|
||||
}
|
||||
|
||||
- (NSString *)description {
|
||||
NSString *stateString = [[self class] stringForState:self.state];
|
||||
return [NSString stringWithFormat:@"RTCVideoSource( %p ): %@", self, stateString];
|
||||
}
|
||||
|
||||
- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
|
||||
getObjcVideoSource(_nativeVideoSource)->OnCapturedFrame(frame);
|
||||
}
|
||||
|
||||
- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps {
|
||||
getObjcVideoSource(_nativeVideoSource)->OnOutputFormatRequest(width, height, fps);
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
|
||||
return _nativeVideoSource;
|
||||
}
|
||||
|
||||
@end
|
||||
@ -0,0 +1,30 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoTrack.h"
|
||||
|
||||
#include "webrtc/api/mediastreaminterface.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface RTCVideoTrack ()
|
||||
|
||||
/** VideoTrackInterface created or passed in at construction. */
|
||||
@property(nonatomic, readonly)
|
||||
rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
|
||||
|
||||
/** Initialize an RTCVideoTrack with its source and an id. */
|
||||
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
|
||||
source:(RTCVideoSource *)source
|
||||
trackId:(NSString *)trackId;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
110
sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm
Normal file
110
sdk/objc/Framework/Classes/PeerConnection/RTCVideoTrack.mm
Normal file
@ -0,0 +1,110 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCVideoTrack+Private.h"
|
||||
|
||||
#import "NSString+StdString.h"
|
||||
#import "RTCMediaStreamTrack+Private.h"
|
||||
#import "RTCPeerConnectionFactory+Private.h"
|
||||
#import "RTCVideoRendererAdapter+Private.h"
|
||||
#import "RTCVideoSource+Private.h"
|
||||
|
||||
@implementation RTCVideoTrack {
|
||||
NSMutableArray *_adapters;
|
||||
}
|
||||
|
||||
@synthesize source = _source;
|
||||
|
||||
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
|
||||
source:(RTCVideoSource *)source
|
||||
trackId:(NSString *)trackId {
|
||||
NSParameterAssert(factory);
|
||||
NSParameterAssert(source);
|
||||
NSParameterAssert(trackId.length);
|
||||
std::string nativeId = [NSString stdStringForString:trackId];
|
||||
rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
|
||||
factory.nativeFactory->CreateVideoTrack(nativeId,
|
||||
source.nativeVideoSource);
|
||||
if ([self initWithNativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
|
||||
_source = source;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithNativeTrack:
|
||||
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeMediaTrack
|
||||
type:(RTCMediaStreamTrackType)type {
|
||||
NSParameterAssert(nativeMediaTrack);
|
||||
NSParameterAssert(type == RTCMediaStreamTrackTypeVideo);
|
||||
if (self = [super initWithNativeTrack:nativeMediaTrack type:type]) {
|
||||
_adapters = [NSMutableArray array];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
for (RTCVideoRendererAdapter *adapter in _adapters) {
|
||||
self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer);
|
||||
}
|
||||
}
|
||||
|
||||
- (RTCVideoSource *)source {
|
||||
if (!_source) {
|
||||
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
|
||||
self.nativeVideoTrack->GetSource();
|
||||
if (source) {
|
||||
_source = [[RTCVideoSource alloc] initWithNativeVideoSource:source.get()];
|
||||
}
|
||||
}
|
||||
return _source;
|
||||
}
|
||||
|
||||
- (void)addRenderer:(id<RTCVideoRenderer>)renderer {
|
||||
// Make sure we don't have this renderer yet.
|
||||
for (RTCVideoRendererAdapter *adapter in _adapters) {
|
||||
if (adapter.videoRenderer == renderer) {
|
||||
NSAssert(NO, @"|renderer| is already attached to this track");
|
||||
return;
|
||||
}
|
||||
}
|
||||
// Create a wrapper that provides a native pointer for us.
|
||||
RTCVideoRendererAdapter* adapter =
|
||||
[[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer];
|
||||
[_adapters addObject:adapter];
|
||||
self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer,
|
||||
rtc::VideoSinkWants());
|
||||
}
|
||||
|
||||
- (void)removeRenderer:(id<RTCVideoRenderer>)renderer {
|
||||
__block NSUInteger indexToRemove = NSNotFound;
|
||||
[_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter,
|
||||
NSUInteger idx,
|
||||
BOOL *stop) {
|
||||
if (adapter.videoRenderer == renderer) {
|
||||
indexToRemove = idx;
|
||||
*stop = YES;
|
||||
}
|
||||
}];
|
||||
if (indexToRemove == NSNotFound) {
|
||||
return;
|
||||
}
|
||||
RTCVideoRendererAdapter *adapterToRemove =
|
||||
[_adapters objectAtIndex:indexToRemove];
|
||||
self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer);
|
||||
[_adapters removeObjectAtIndex:indexToRemove];
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
|
||||
return static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get());
|
||||
}
|
||||
|
||||
@end
|
||||
78
sdk/objc/Framework/Classes/UI/RTCCameraPreviewView.m
Normal file
78
sdk/objc/Framework/Classes/UI/RTCCameraPreviewView.m
Normal file
@ -0,0 +1,78 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCCameraPreviewView.h"
|
||||
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
#import "RTCDispatcher+Private.h"
|
||||
|
||||
@implementation RTCCameraPreviewView
|
||||
|
||||
@synthesize captureSession = _captureSession;
|
||||
|
||||
+ (Class)layerClass {
|
||||
return [AVCaptureVideoPreviewLayer class];
|
||||
}
|
||||
|
||||
- (void)setCaptureSession:(AVCaptureSession *)captureSession {
|
||||
if (_captureSession == captureSession) {
|
||||
return;
|
||||
}
|
||||
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
|
||||
block:^{
|
||||
_captureSession = captureSession;
|
||||
AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
|
||||
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
|
||||
block:^{
|
||||
previewLayer.session = captureSession;
|
||||
}];
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)layoutSubviews {
|
||||
[super layoutSubviews];
|
||||
|
||||
// Update the video orientation based on the device orientation.
|
||||
[self setCorrectVideoOrientation];
|
||||
}
|
||||
|
||||
- (void)setCorrectVideoOrientation {
|
||||
// Get current device orientation.
|
||||
UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
|
||||
AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
|
||||
|
||||
// First check if we are allowed to set the video orientation.
|
||||
if (previewLayer.connection.isVideoOrientationSupported) {
|
||||
// Set the video orientation based on device orientation.
|
||||
if (deviceOrientation == UIInterfaceOrientationPortraitUpsideDown) {
|
||||
previewLayer.connection.videoOrientation =
|
||||
AVCaptureVideoOrientationPortraitUpsideDown;
|
||||
} else if (deviceOrientation == UIInterfaceOrientationLandscapeRight) {
|
||||
previewLayer.connection.videoOrientation =
|
||||
AVCaptureVideoOrientationLandscapeRight;
|
||||
} else if (deviceOrientation == UIInterfaceOrientationLandscapeLeft) {
|
||||
previewLayer.connection.videoOrientation =
|
||||
AVCaptureVideoOrientationLandscapeLeft;
|
||||
} else {
|
||||
previewLayer.connection.videoOrientation =
|
||||
AVCaptureVideoOrientationPortrait;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (AVCaptureVideoPreviewLayer *)previewLayer {
|
||||
return (AVCaptureVideoPreviewLayer *)self.layer;
|
||||
}
|
||||
|
||||
@end
|
||||
328
sdk/objc/Framework/Classes/UI/RTCEAGLVideoView.m
Normal file
328
sdk/objc/Framework/Classes/UI/RTCEAGLVideoView.m
Normal file
@ -0,0 +1,328 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCEAGLVideoView.h"
|
||||
|
||||
#import <GLKit/GLKit.h>
|
||||
|
||||
#import "RTCDefaultShader.h"
|
||||
#import "RTCI420TextureCache.h"
|
||||
#import "RTCNV12TextureCache.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
#import "WebRTC/RTCVideoFrameBuffer.h"
|
||||
|
||||
// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
|
||||
// refreshes, which should be 30fps. We wrap the display link in order to avoid
|
||||
// a retain cycle since CADisplayLink takes a strong reference onto its target.
|
||||
// The timer is paused by default.
|
||||
@interface RTCDisplayLinkTimer : NSObject
|
||||
|
||||
@property(nonatomic) BOOL isPaused;
|
||||
|
||||
- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler;
|
||||
- (void)invalidate;
|
||||
|
||||
@end
|
||||
|
||||
@implementation RTCDisplayLinkTimer {
|
||||
CADisplayLink *_displayLink;
|
||||
void (^_timerHandler)(void);
|
||||
}
|
||||
|
||||
- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler {
|
||||
NSParameterAssert(timerHandler);
|
||||
if (self = [super init]) {
|
||||
_timerHandler = timerHandler;
|
||||
_displayLink =
|
||||
[CADisplayLink displayLinkWithTarget:self
|
||||
selector:@selector(displayLinkDidFire:)];
|
||||
_displayLink.paused = YES;
|
||||
// Set to half of screen refresh, which should be 30fps.
|
||||
[_displayLink setFrameInterval:2];
|
||||
[_displayLink addToRunLoop:[NSRunLoop currentRunLoop]
|
||||
forMode:NSRunLoopCommonModes];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
[self invalidate];
|
||||
}
|
||||
|
||||
- (BOOL)isPaused {
|
||||
return _displayLink.paused;
|
||||
}
|
||||
|
||||
- (void)setIsPaused:(BOOL)isPaused {
|
||||
_displayLink.paused = isPaused;
|
||||
}
|
||||
|
||||
- (void)invalidate {
|
||||
[_displayLink invalidate];
|
||||
}
|
||||
|
||||
- (void)displayLinkDidFire:(CADisplayLink *)displayLink {
|
||||
_timerHandler();
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
// RTCEAGLVideoView wraps a GLKView which is setup with
|
||||
// enableSetNeedsDisplay = NO for the purpose of gaining control of
|
||||
// exactly when to call -[GLKView display]. This need for extra
|
||||
// control is required to avoid triggering method calls on GLKView
|
||||
// that results in attempting to bind the underlying render buffer
|
||||
// when the drawable size would be empty which would result in the
|
||||
// error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is
|
||||
// the method that will trigger the binding of the render
|
||||
// buffer. Because the standard behaviour of -[UIView setNeedsDisplay]
|
||||
// is disabled for the reasons above, the RTCEAGLVideoView maintains
|
||||
// its own |isDirty| flag.
|
||||
|
||||
// Private interface: GLKViewDelegate conformance and internal state.
@interface RTCEAGLVideoView () <GLKViewDelegate>
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCVideoFrame *videoFrame;
// The GLKView whose framebuffer this view renders into.
@property(nonatomic, readonly) GLKView *glkView;
@end
|
||||
|
||||
@implementation RTCEAGLVideoView {
  RTCDisplayLinkTimer *_timer;
  EAGLContext *_glContext;
  // This flag should only be set and read on the main thread (e.g. by
  // setNeedsDisplay).
  BOOL _isDirty;
  id<RTCVideoViewShading> _shader;
  RTCNV12TextureCache *_nv12TextureCache;
  RTCI420TextureCache *_i420TextureCache;
  // The frame most recently rendered by -glkView:drawInRect:. Used to skip
  // redundant texture uploads/redraws when the frame has not changed.
  RTCVideoFrame *_lastDrawnFrame;
}

@synthesize delegate = _delegate;
@synthesize videoFrame = _videoFrame;
@synthesize glkView = _glkView;

- (instancetype)initWithFrame:(CGRect)frame {
  return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]];
}

- (instancetype)initWithCoder:(NSCoder *)aDecoder {
  return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]];
}

- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTCVideoViewShading>)shader {
  if (self = [super initWithFrame:frame]) {
    _shader = shader;
    if (![self configure]) {
      return nil;
    }
  }
  return self;
}

- (instancetype)initWithCoder:(NSCoder *)aDecoder shader:(id<RTCVideoViewShading>)shader {
  if (self = [super initWithCoder:aDecoder]) {
    _shader = shader;
    if (![self configure]) {
      return nil;
    }
  }
  return self;
}

// One-time setup shared by both initializers: creates the GL context and
// GLKView, subscribes to app-state notifications, and starts the render
// timer. Returns NO if no OpenGL ES context could be created.
- (BOOL)configure {
  // Prefer an ES3 context but fall back to ES2 on older devices.
  EAGLContext *glContext =
      [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
  if (!glContext) {
    glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
  }
  if (!glContext) {
    RTCLogError(@"Failed to create EAGLContext");
    return NO;
  }
  _glContext = glContext;

  // GLKView manages a framebuffer for us.
  _glkView = [[GLKView alloc] initWithFrame:CGRectZero
                                    context:_glContext];
  _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
  _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
  _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;
  _glkView.drawableMultisample = GLKViewDrawableMultisampleNone;
  _glkView.delegate = self;
  _glkView.layer.masksToBounds = YES;
  // Redraws are driven explicitly from the display link timer; see the class
  // comment for why setNeedsDisplay must not trigger -[GLKView display].
  _glkView.enableSetNeedsDisplay = NO;
  [self addSubview:_glkView];

  // Listen to application state in order to clean up OpenGL before app goes
  // away.
  NSNotificationCenter *notificationCenter =
      [NSNotificationCenter defaultCenter];
  [notificationCenter addObserver:self
                         selector:@selector(willResignActive)
                             name:UIApplicationWillResignActiveNotification
                           object:nil];
  [notificationCenter addObserver:self
                         selector:@selector(didBecomeActive)
                             name:UIApplicationDidBecomeActiveNotification
                           object:nil];

  // Frames are received on a separate thread, so we poll for current frame
  // using a refresh rate proportional to screen refresh frequency. This
  // occurs on the main thread. Weak capture breaks the cycle
  // self -> _timer -> handler block -> self.
  __weak RTCEAGLVideoView *weakSelf = self;
  _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
    RTCEAGLVideoView *strongSelf = weakSelf;
    [strongSelf displayLinkTimerDidFire];
  }];
  [self setupGL];
  return YES;
}

- (void)dealloc {
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  // Only touch GL state if the app is still active; in the background the GL
  // resources have already been torn down by -willResignActive.
  UIApplicationState appState =
      [UIApplication sharedApplication].applicationState;
  if (appState == UIApplicationStateActive) {
    [self teardownGL];
  }
  // Break the display link's strong reference to the timer.
  [_timer invalidate];
  if (_glContext && [EAGLContext currentContext] == _glContext) {
    [EAGLContext setCurrentContext:nil];
  }
}

#pragma mark - UIView

- (void)setNeedsDisplay {
  [super setNeedsDisplay];
  _isDirty = YES;
}

- (void)setNeedsDisplayInRect:(CGRect)rect {
  [super setNeedsDisplayInRect:rect];
  _isDirty = YES;
}

- (void)layoutSubviews {
  [super layoutSubviews];
  _glkView.frame = self.bounds;
}

#pragma mark - GLKViewDelegate

// This method is called when the GLKView's content is dirty and needs to be
// redrawn. This occurs on main thread.
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
  // The renderer will draw the frame to the framebuffer corresponding to the
  // one used by |view|.
  RTCVideoFrame *frame = self.videoFrame;
  if (!frame || frame == _lastDrawnFrame) {
    return;
  }
  [self ensureGLContext];
  glClear(GL_COLOR_BUFFER_BIT);
  if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
    // Native NV12 pixel buffer path; the texture cache is created lazily.
    if (!_nv12TextureCache) {
      _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
    }
    if (_nv12TextureCache) {
      [_nv12TextureCache uploadFrameToTextures:frame];
      [_shader applyShadingForFrameWithWidth:frame.width
                                      height:frame.height
                                    rotation:frame.rotation
                                      yPlane:_nv12TextureCache.yTexture
                                     uvPlane:_nv12TextureCache.uvTexture];
      [_nv12TextureCache releaseTextures];
    }
  } else {
    // I420 path; the texture cache is created lazily.
    if (!_i420TextureCache) {
      _i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext];
    }
    [_i420TextureCache uploadFrameToTextures:frame];
    [_shader applyShadingForFrameWithWidth:frame.width
                                    height:frame.height
                                  rotation:frame.rotation
                                    yPlane:_i420TextureCache.yTexture
                                    uPlane:_i420TextureCache.uTexture
                                    vPlane:_i420TextureCache.vTexture];
  }
  // Record the frame we just drew so the |frame == _lastDrawnFrame| early-out
  // above (and the check in -displayLinkTimerDidFire) can actually fire.
  // Without this assignment |_lastDrawnFrame| never changes, and every
  // display link tick re-uploads and redraws the same frame. The macOS
  // sibling, RTCNSGLVideoView, records the drawn frame the same way.
  _lastDrawnFrame = frame;
}

#pragma mark - RTCVideoRenderer

// These methods may be called on non-main thread.
- (void)setSize:(CGSize)size {
  __weak RTCEAGLVideoView *weakSelf = self;
  dispatch_async(dispatch_get_main_queue(), ^{
    RTCEAGLVideoView *strongSelf = weakSelf;
    [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
  });
}

- (void)renderFrame:(RTCVideoFrame *)frame {
  self.videoFrame = frame;
}

#pragma mark - Private

- (void)displayLinkTimerDidFire {
  // Don't render unless video frame have changed or the view content
  // has explicitly been marked dirty.
  if (!_isDirty && _lastDrawnFrame == self.videoFrame) {
    return;
  }

  // Always reset isDirty at this point, even if -[GLKView display]
  // won't be called in the case the drawable size is empty.
  _isDirty = NO;

  // Only call -[GLKView display] if the drawable size is
  // non-empty. Calling display will make the GLKView setup its
  // render buffer if necessary, but that will fail with error
  // GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT if size is empty.
  if (self.bounds.size.width > 0 && self.bounds.size.height > 0) {
    [_glkView display];
  }
}

// (Re)enables rendering: clears any stale frame and unpauses the timer.
// Called from -configure and when the app becomes active.
- (void)setupGL {
  self.videoFrame = nil;
  [self ensureGLContext];
  glDisable(GL_DITHER);
  _timer.isPaused = NO;
}

// Releases GL resources before the app is backgrounded; touching GL in the
// background would terminate the app.
- (void)teardownGL {
  self.videoFrame = nil;
  _timer.isPaused = YES;
  [_glkView deleteDrawable];
  [self ensureGLContext];
  _nv12TextureCache = nil;
  _i420TextureCache = nil;
}

- (void)didBecomeActive {
  [self setupGL];
}

- (void)willResignActive {
  [self teardownGL];
}

// Makes |_glContext| current on the calling thread if it isn't already.
- (void)ensureGLContext {
  NSAssert(_glContext, @"context shouldn't be nil");
  if ([EAGLContext currentContext] != _glContext) {
    [EAGLContext setCurrentContext:_glContext];
  }
}

@end
|
||||
196
sdk/objc/Framework/Classes/UI/RTCNSGLVideoView.m
Normal file
196
sdk/objc/Framework/Classes/UI/RTCNSGLVideoView.m
Normal file
@ -0,0 +1,196 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
#if !TARGET_OS_IPHONE
|
||||
|
||||
#import "WebRTC/RTCNSGLVideoView.h"
|
||||
|
||||
#import <AppKit/NSOpenGL.h>
|
||||
#import <CoreVideo/CVDisplayLink.h>
|
||||
#import <OpenGL/gl3.h>
|
||||
|
||||
#import "RTCDefaultShader.h"
|
||||
#import "RTCI420TextureCache.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
// Private interface: internal state shared between the main thread and the
// display link callback.
@interface RTCNSGLVideoView ()
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCVideoFrame *videoFrame;
// Texture cache for I420 plane uploads; written from the display link thread
// in -drawFrame and cleared on the main thread in -clearGLContext, hence
// atomic.
@property(atomic, strong) RTCI420TextureCache *i420TextureCache;

// Renders the current |videoFrame|; may be called from the display link
// callback thread as well as from -drawRect:.
- (void)drawFrame;
@end
|
||||
|
||||
// CVDisplayLink output callback. Runs on the display link's dedicated thread;
// the view instance is passed through |displayLinkContext| (set up in
// -setupDisplayLink) and asked to draw the current frame.
static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
                                   const CVTimeStamp *now,
                                   const CVTimeStamp *outputTime,
                                   CVOptionFlags flagsIn,
                                   CVOptionFlags *flagsOut,
                                   void *displayLinkContext) {
  RTCNSGLVideoView *videoView = (__bridge RTCNSGLVideoView *)displayLinkContext;
  [videoView drawFrame];
  return kCVReturnSuccess;
}
|
||||
|
||||
@implementation RTCNSGLVideoView {
  CVDisplayLinkRef _displayLink;
  // The frame (as read from |videoFrame|) that was most recently drawn.
  // Compared by pointer against |videoFrame| to skip redundant redraws.
  RTCVideoFrame *_lastDrawnFrame;
  id<RTCVideoViewShading> _shader;
}

@synthesize delegate = _delegate;
@synthesize videoFrame = _videoFrame;
@synthesize i420TextureCache = _i420TextureCache;

- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format {
  return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]];
}

- (instancetype)initWithFrame:(NSRect)frame
                  pixelFormat:(NSOpenGLPixelFormat *)format
                       shader:(id<RTCVideoViewShading>)shader {
  if (self = [super initWithFrame:frame pixelFormat:format]) {
    _shader = shader;
  }
  return self;
}

- (void)dealloc {
  [self teardownDisplayLink];
}

- (void)drawRect:(NSRect)rect {
  [self drawFrame];
}

- (void)reshape {
  [super reshape];
  NSRect frame = [self frame];
  [self ensureGLContext];
  // The GL context is shared with the display link thread, so lock around the
  // viewport update.
  CGLLockContext([[self openGLContext] CGLContextObj]);
  glViewport(0, 0, frame.size.width, frame.size.height);
  CGLUnlockContext([[self openGLContext] CGLContextObj]);
}

- (void)lockFocus {
  NSOpenGLContext *context = [self openGLContext];
  [super lockFocus];
  if ([context view] != self) {
    [context setView:self];
  }
  [context makeCurrentContext];
}

- (void)prepareOpenGL {
  [super prepareOpenGL];
  [self ensureGLContext];
  glDisable(GL_DITHER);
  [self setupDisplayLink];
}

- (void)clearGLContext {
  [self ensureGLContext];
  self.i420TextureCache = nil;
  [super clearGLContext];
}

#pragma mark - RTCVideoRenderer

// These methods may be called on non-main thread.
- (void)setSize:(CGSize)size {
  dispatch_async(dispatch_get_main_queue(), ^{
    [self.delegate videoView:self didChangeVideoSize:size];
  });
}

- (void)renderFrame:(RTCVideoFrame *)frame {
  self.videoFrame = frame;
}

#pragma mark - Private

- (void)drawFrame {
  RTCVideoFrame *frame = self.videoFrame;
  if (!frame || frame == _lastDrawnFrame) {
    return;
  }
  // This method may be called from CVDisplayLink callback which isn't on the
  // main thread so we have to lock the GL context before drawing.
  NSOpenGLContext *context = [self openGLContext];
  CGLLockContext([context CGLContextObj]);

  [self ensureGLContext];
  glClear(GL_COLOR_BUFFER_BIT);

  // Rendering native CVPixelBuffer is not supported on OS X.
  // TODO(magjed): Add support for NV12 texture cache on OS X.
  // Keep the converted copy in a separate variable: |_lastDrawnFrame| must
  // record the frame as read from |videoFrame|, because the pointer-equality
  // early-out above compares against |videoFrame|. Storing the conversion
  // result instead would never match, so every display link tick would
  // re-convert and redraw the same frame.
  RTCVideoFrame *i420Frame = [frame newI420VideoFrame];
  if (!self.i420TextureCache) {
    self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context];
  }
  RTCI420TextureCache *i420TextureCache = self.i420TextureCache;
  if (i420TextureCache) {
    [i420TextureCache uploadFrameToTextures:i420Frame];
    [_shader applyShadingForFrameWithWidth:i420Frame.width
                                    height:i420Frame.height
                                  rotation:i420Frame.rotation
                                    yPlane:i420TextureCache.yTexture
                                    uPlane:i420TextureCache.uTexture
                                    vPlane:i420TextureCache.vTexture];
    [context flushBuffer];
    _lastDrawnFrame = frame;
  }
  CGLUnlockContext([context CGLContextObj]);
}

// Creates and starts the display link that drives rendering. No-op if it
// already exists.
- (void)setupDisplayLink {
  if (_displayLink) {
    return;
  }
  // Synchronize buffer swaps with vertical refresh rate.
  GLint swapInt = 1;
  [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];

  // Create display link.
  CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
  CVDisplayLinkSetOutputCallback(_displayLink,
                                 &OnDisplayLinkFired,
                                 (__bridge void *)self);
  // Set the display link for the current renderer.
  CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
  CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
  CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
      _displayLink, cglContext, cglPixelFormat);
  CVDisplayLinkStart(_displayLink);
}

- (void)teardownDisplayLink {
  if (!_displayLink) {
    return;
  }
  // Stop the callback thread before releasing so a tick cannot fire into a
  // view that is being deallocated.
  CVDisplayLinkStop(_displayLink);
  CVDisplayLinkRelease(_displayLink);
  _displayLink = NULL;
}

// Makes this view's GL context current on the calling thread if needed.
- (void)ensureGLContext {
  NSOpenGLContext *context = [self openGLContext];
  NSAssert(context, @"context shouldn't be nil");
  if ([NSOpenGLContext currentContext] != context) {
    [context makeCurrentContext];
  }
}

@end
|
||||
|
||||
#endif // !TARGET_OS_IPHONE
|
||||
@ -0,0 +1,23 @@
|
||||
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

NS_ASSUME_NONNULL_BEGIN

// Category for determining which camera position a captured sample buffer
// originated from.
@interface AVCaptureSession (DevicePosition)

// Check the image's EXIF for the camera the image came from.
// Returns the device position for |sampleBuffer|.
+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer;

@end

NS_ASSUME_NONNULL_END
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user