Wrap WebRTC OBJC API types with RTC_OBJC_TYPE.

This CL introduces two new macros that affect the WebRTC OBJC API symbols:

- RTC_OBJC_TYPE_PREFIX:
  Macro used to prepend a prefix to the API types that are exported with
  RTC_OBJC_EXPORT.

  Clients can patch the definition of this macro locally and build
  WebRTC.framework with their own prefix in case symbol clashing is a
  problem.

  This macro must only be defined by changing the value in
  sdk/objc/base/RTCMacros.h, and not via a compiler flag, to ensure
  it has a unique value.

- RTC_OBJC_TYPE:
  Macro used internally to reference API types. An API type declared
  without this macro is not included in the set of types affected by
  the configurable RTC_OBJC_TYPE_PREFIX (see the sketch after this
  list).
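
For illustration, here is a minimal sketch of how the two macros can
compose in sdk/objc/base/RTCMacros.h. The concatenation helpers below
are assumptions for illustration, not the verbatim contents of the
file:

  // Sketch only, not the verbatim RTCMacros.h.
  // Empty by default; clients who need to avoid symbol clashes patch
  // this single definition before building WebRTC.framework.
  #define RTC_OBJC_TYPE_PREFIX

  // Two-step concatenation so RTC_OBJC_TYPE_PREFIX is expanded before
  // the tokens are pasted (helper names are hypothetical).
  #define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
  #define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)

  // Every API declaration and reference goes through this macro, so
  // the whole exported Objective-C surface follows the configured
  // prefix.
  #define RTC_OBJC_TYPE(type_name) \
    RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

With the default empty prefix, RTC_OBJC_TYPE(RTCVideoFrame) expands to
RTCVideoFrame; a client that patches the prefix to MyApp gets
MyAppRTCVideoFrame, MyAppRTCAudioSession, and so on for every exported
type.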

Manual changes:
https://webrtc-review.googlesource.com/c/src/+/173781/5..10

The auto-generated changes in PS#5 were made with:
https://webrtc-review.googlesource.com/c/src/+/174061.

Bug: None
Change-Id: I0d54ca94db764fb3b6cb4365873f79e14cd879b8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173781
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31153}
Author: Mirko Bonadei
Committed: 2020-05-04 16:14:32 +02:00 (by Commit Bot)
Commit: a81e9c82fc (parent: ce1320cc4d)
303 changed files with 2534 additions and 2189 deletions

@@ -13,17 +13,18 @@
#import "base/RTCLogging.h"
@implementation RTCAudioSession (Configuration)
@implementation RTC_OBJC_TYPE (RTCAudioSession)
(Configuration)
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
error:(NSError **)outError {
- (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error
: (NSError **)outError {
return [self setConfiguration:configuration
active:NO
shouldSetActive:NO
error:outError];
}
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
active:(BOOL)active
error:(NSError **)outError {
return [self setConfiguration:configuration
@@ -34,7 +35,7 @@
#pragma mark - Private
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
active:(BOOL)active
shouldSetActive:(BOOL)shouldSetActive
error:(NSError **)outError {

@@ -12,14 +12,15 @@
NS_ASSUME_NONNULL_BEGIN
@class RTCAudioSessionConfiguration;
@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration);
@interface RTCAudioSession ()
@interface RTC_OBJC_TYPE (RTCAudioSession)
()
/** Number of times setActive:YES has succeeded without a balanced call to
* setActive:NO.
*/
@property(nonatomic, readonly) int activationCount;
/** Number of times setActive:YES has succeeded without a balanced call to
* setActive:NO.
*/
@property(nonatomic, readonly) int activationCount;
/** The number of times |beginWebRTCSession| was called without a balanced call
* to |endWebRTCSession|.
@@ -40,7 +41,7 @@ NS_ASSUME_NONNULL_BEGIN
* the list. This delegate will be notified before other delegates of
* audio events.
*/
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate;
- (void)pushDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
/** Signals RTCAudioSession that a WebRTC session is about to begin and
* audio configuration is needed. Will configure the audio session for WebRTC

@@ -21,78 +21,81 @@ extern NSInteger const kRTCAudioSessionErrorLockRequired;
/** Unknown configuration error occurred. */
extern NSInteger const kRTCAudioSessionErrorConfiguration;
@class RTCAudioSession;
@class RTCAudioSessionConfiguration;
@class RTC_OBJC_TYPE(RTCAudioSession);
@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration);
// Surfaces AVAudioSession events. WebRTC will listen directly for notifications
// from AVAudioSession and handle them before calling these delegate methods,
// at which point applications can perform additional processing if required.
RTC_OBJC_EXPORT
@protocol RTCAudioSessionDelegate <NSObject>
@protocol RTC_OBJC_TYPE
(RTCAudioSessionDelegate)<NSObject>
@optional
@optional
/** Called on a system notification thread when AVAudioSession starts an
* interruption event.
*/
- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session;
- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
/** Called on a system notification thread when AVAudioSession ends an
* interruption event.
*/
- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
shouldResumeSession:(BOOL)shouldResumeSession;
/** Called on a system notification thread when AVAudioSession changes the
* route.
*/
- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
/** Called on a system notification thread when AVAudioSession media server
* terminates.
*/
- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session;
- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
/** Called on a system notification thread when AVAudioSession media server
* restarts.
*/
- (void)audioSessionMediaServerReset:(RTCAudioSession *)session;
- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
// TODO(tkchin): Maybe handle SilenceSecondaryAudioHintNotification.
- (void)audioSession:(RTCAudioSession *)session didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session
didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
/** Called on a WebRTC thread when the audio device is notified to begin
* playback or recording.
*/
- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session;
- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
/** Called on a WebRTC thread when the audio device is notified to stop
* playback or recording.
*/
- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session;
- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
/** Called when the AVAudioSession output volume value changes. */
- (void)audioSession:(RTCAudioSession *)audioSession didChangeOutputVolume:(float)outputVolume;
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
didChangeOutputVolume:(float)outputVolume;
/** Called when the audio device detects a playout glitch. The argument is the
* number of glitches detected so far in the current audio playout session.
*/
- (void)audioSession:(RTCAudioSession *)audioSession
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;
/** Called when the audio session is about to change the active state.
*/
- (void)audioSession:(RTCAudioSession *)audioSession willSetActive:(BOOL)active;
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession willSetActive:(BOOL)active;
/** Called after the audio session successfully changed the active state.
*/
- (void)audioSession:(RTCAudioSession *)audioSession didSetActive:(BOOL)active;
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didSetActive:(BOOL)active;
/** Called after the audio session failed to change the active state.
*/
- (void)audioSession:(RTCAudioSession *)audioSession
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
failedToSetActive:(BOOL)active
error:(NSError *)error;
@@ -103,10 +106,11 @@ RTC_OBJC_EXPORT
* case of this is when CallKit activates the audio session for the application
*/
RTC_OBJC_EXPORT
@protocol RTCAudioSessionActivationDelegate <NSObject>
@protocol RTC_OBJC_TYPE
(RTCAudioSessionActivationDelegate)<NSObject>
/** Called when the audio session is activated outside of the app by iOS. */
- (void)audioSessionDidActivate:(AVAudioSession *)session;
/** Called when the audio session is activated outside of the app by iOS. */
- (void)audioSessionDidActivate : (AVAudioSession *)session;
/** Called when the audio session is deactivated outside of the app by iOS. */
- (void)audioSessionDidDeactivate:(AVAudioSession *)session;
@@ -121,7 +125,7 @@ RTC_OBJC_EXPORT
* activated only once. See |setActive:error:|.
*/
RTC_OBJC_EXPORT
@interface RTCAudioSession : NSObject <RTCAudioSessionActivationDelegate>
@interface RTC_OBJC_TYPE (RTCAudioSession) : NSObject <RTC_OBJC_TYPE(RTCAudioSessionActivationDelegate)>
/** Convenience property to access the AVAudioSession singleton. Callers should
* not call setters on AVAudioSession directly, but other method invocations
@@ -196,9 +200,9 @@ RTC_OBJC_EXPORT
- (instancetype)init NS_UNAVAILABLE;
/** Adds a delegate, which is held weakly. */
- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate;
- (void)addDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
/** Removes an added delegate. */
- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate;
- (void)removeDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
/** Request exclusive access to the audio session for configuration. This call
* will block if the lock is held by another object.
@@ -237,19 +241,21 @@ RTC_OBJC_EXPORT
error:(NSError **)outError;
@end
@interface RTCAudioSession (Configuration)
@interface RTC_OBJC_TYPE (RTCAudioSession)
(Configuration)
/** Applies the configuration to the current session. Attempts to set all
* properties even if previous ones fail. Only the last error will be
* returned.
* |lockForConfiguration| must be called first.
*/
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration error:(NSError **)outError;
/** Applies the configuration to the current session. Attempts to set all
* properties even if previous ones fail. Only the last error will be
* returned.
* |lockForConfiguration| must be called first.
*/
- (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error
: (NSError **)outError;
/** Convenience method that calls both setConfiguration and setActive.
* |lockForConfiguration| must be called first.
*/
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
active:(BOOL)active
error:(NSError **)outError;

@@ -21,20 +21,20 @@
#import "RTCAudioSessionConfiguration.h"
#import "base/RTCLogging.h"
NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession";
NSString *const kRTCAudioSessionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)";
NSInteger const kRTCAudioSessionErrorLockRequired = -1;
NSInteger const kRTCAudioSessionErrorConfiguration = -2;
NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
@interface RTCAudioSession ()
@property(nonatomic, readonly) std::vector<__weak id<RTCAudioSessionDelegate> > delegates;
@interface RTC_OBJC_TYPE (RTCAudioSession)
() @property(nonatomic,
readonly) std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
@end
// This class needs to be thread-safe because it is accessed from many threads.
// TODO(tkchin): Consider more granular locking. We're not expecting a lot of
// lock contention so coarse locks should be fine for now.
@implementation RTCAudioSession {
@implementation RTC_OBJC_TYPE (RTCAudioSession) {
rtc::CriticalSection _crit;
AVAudioSession *_session;
volatile int _activationCount;
@@ -54,7 +54,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
+ (instancetype)sharedInstance {
static dispatch_once_t onceToken;
static RTCAudioSession *sharedInstance = nil;
static RTC_OBJC_TYPE(RTCAudioSession) *sharedInstance = nil;
dispatch_once(&onceToken, ^{
sharedInstance = [[self alloc] init];
});
@@ -102,9 +102,9 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
[_session addObserver:self
forKeyPath:kRTCAudioSessionOutputVolumeSelector
options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
context:(__bridge void*)RTCAudioSession.class];
context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
RTCLog(@"RTCAudioSession (%p): init.", self);
RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self);
}
return self;
}
@@ -113,25 +113,24 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
[[NSNotificationCenter defaultCenter] removeObserver:self];
[_session removeObserver:self
forKeyPath:kRTCAudioSessionOutputVolumeSelector
context:(__bridge void*)RTCAudioSession.class];
RTCLog(@"RTCAudioSession (%p): dealloc.", self);
context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self);
}
- (NSString *)description {
NSString *format =
@"RTCAudioSession: {\n"
" category: %@\n"
" categoryOptions: %ld\n"
" mode: %@\n"
" isActive: %d\n"
" sampleRate: %.2f\n"
" IOBufferDuration: %f\n"
" outputNumberOfChannels: %ld\n"
" inputNumberOfChannels: %ld\n"
" outputLatency: %f\n"
" inputLatency: %f\n"
" outputVolume: %f\n"
"}";
NSString *format = @"RTC_OBJC_TYPE(RTCAudioSession): {\n"
" category: %@\n"
" categoryOptions: %ld\n"
" mode: %@\n"
" isActive: %d\n"
" sampleRate: %.2f\n"
" IOBufferDuration: %f\n"
" outputNumberOfChannels: %ld\n"
" inputNumberOfChannels: %ld\n"
" outputLatency: %f\n"
" inputLatency: %f\n"
" outputVolume: %f\n"
"}";
NSString *description = [NSString stringWithFormat:format,
self.category, (long)self.categoryOptions, self.mode,
self.isActive, self.sampleRate, self.IOBufferDuration,
@@ -206,7 +205,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
}
// TODO(tkchin): Check for duplicates.
- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate {
- (void)addDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
RTCLog(@"Adding delegate: (%p)", delegate);
if (!delegate) {
return;
@@ -217,7 +216,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
}
}
- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate {
- (void)removeDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
RTCLog(@"Removing delegate: (%p)", delegate);
if (!delegate) {
return;
@@ -621,7 +620,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
return error;
}
- (std::vector<__weak id<RTCAudioSessionDelegate> >)delegates {
- (std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> >)delegates {
@synchronized(self) {
// Note: this returns a copy.
return _delegates;
@@ -629,7 +628,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
}
// TODO(tkchin): check for duplicates.
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate {
- (void)pushDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
@synchronized(self) {
_delegates.insert(_delegates.begin(), delegate);
}
@@ -687,7 +686,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
// acquire lock if it hasn't already been called.
if (!self.isLocked) {
if (outError) {
*outError = [RTCAudioSession lockError];
*outError = [RTC_OBJC_TYPE(RTCAudioSession) lockError];
}
return NO;
}
@@ -730,8 +729,8 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
// Configure the AVAudioSession and activate it.
// Provide an error even if there isn't one so we can log it.
NSError *error = nil;
RTCAudioSessionConfiguration *webRTCConfig =
[RTCAudioSessionConfiguration webRTCConfiguration];
RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig =
[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
RTCLogError(@"Failed to set WebRTC audio configuration: %@",
error.localizedDescription);
@@ -866,7 +865,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context {
if (context == (__bridge void*)RTCAudioSession.class) {
if (context == (__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class) {
if (object == _session) {
NSNumber *newVolume = change[NSKeyValueChangeNewKey];
RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);

@@ -23,7 +23,7 @@ RTC_EXTERN const double kRTCAudioSessionLowComplexityIOBufferDuration;
// Struct to hold configuration values.
RTC_OBJC_EXPORT
@interface RTCAudioSessionConfiguration : NSObject
@interface RTC_OBJC_TYPE (RTCAudioSessionConfiguration) : NSObject
@property(nonatomic, strong) NSString *category;
@property(nonatomic, assign) AVAudioSessionCategoryOptions categoryOptions;
@@ -41,7 +41,7 @@ RTC_OBJC_EXPORT
/** Returns the configuration that WebRTC needs. */
+ (instancetype)webRTCConfiguration;
/** Provide a way to override the default configuration. */
+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration;
+ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration;
@end

@@ -51,9 +51,9 @@ const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.02;
// TODO(henrika): monitor this size and determine if it should be modified.
const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil;
static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil;
@implementation RTCAudioSessionConfiguration
@implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration)
@synthesize category = _category;
@synthesize categoryOptions = _categoryOptions;
@@ -105,9 +105,9 @@ static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil;
}
+ (instancetype)currentConfiguration {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTCAudioSessionConfiguration *config =
[[RTCAudioSessionConfiguration alloc] init];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
[[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
config.category = session.category;
config.categoryOptions = session.categoryOptions;
config.mode = session.mode;
@@ -120,11 +120,11 @@ static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil;
+ (instancetype)webRTCConfiguration {
@synchronized(self) {
return (RTCAudioSessionConfiguration *)gWebRTCConfiguration;
return (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)gWebRTCConfiguration;
}
}
+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration {
+ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration {
@synchronized(self) {
gWebRTCConfiguration = configuration;
}

@@ -19,7 +19,7 @@ class AudioSessionObserver;
/** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate
* methods on the AudioSessionObserver.
*/
@interface RTCNativeAudioSessionDelegateAdapter : NSObject <RTCAudioSessionDelegate>
@interface RTCNativeAudioSessionDelegateAdapter : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
- (instancetype)init NS_UNAVAILABLE;

@@ -26,20 +26,20 @@
return self;
}
#pragma mark - RTCAudioSessionDelegate
#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session {
- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
_observer->OnInterruptionBegin();
}
- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
shouldResumeSession:(BOOL)shouldResumeSession {
_observer->OnInterruptionEnd();
}
- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
switch (reason) {
case AVAudioSessionRouteChangeReasonUnknown:
case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
@@ -64,24 +64,24 @@
}
}
- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session {
- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionMediaServerReset:(RTCAudioSession *)session {
- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSession:(RTCAudioSession *)session
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session
didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
_observer->OnCanPlayOrRecordChange(canPlayOrRecord);
}
- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session {
- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {
- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSession:(RTCAudioSession *)audioSession
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
didChangeOutputVolume:(float)outputVolume {
_observer->OnChangedOutputVolume();
}

@@ -17,10 +17,10 @@
NS_ASSUME_NONNULL_BEGIN
RTC_OBJC_EXPORT
// Camera capture that implements RTCVideoCapturer. Delivers frames to a RTCVideoCapturerDelegate
// (usually RTCVideoSource).
// Camera capture that implements RTCVideoCapturer. Delivers frames to a
// RTCVideoCapturerDelegate (usually RTCVideoSource).
NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.")
@interface RTCCameraVideoCapturer : RTCVideoCapturer
@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer)
// Capture session that is used for capturing. Valid from initialization to dealloc.
@property(readonly, nonatomic) AVCaptureSession *captureSession;

@@ -25,8 +25,9 @@
const int64_t kNanosecondsPerSecond = 1000000000;
@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
()<AVCaptureVideoDataOutputSampleBufferDelegate> @property(nonatomic,
readonly) dispatch_queue_t frameQueue;
@property(nonatomic, strong) AVCaptureDevice *currentDevice;
@property(nonatomic, assign) BOOL hasRetriedOnFatalError;
@property(nonatomic, assign) BOOL isRunning;
@@ -34,7 +35,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
@property(nonatomic, assign) BOOL willBeRunning;
@end
@implementation RTCCameraVideoCapturer {
@implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) {
AVCaptureVideoDataOutput *_videoDataOutput;
AVCaptureSession *_captureSession;
FourCharCode _preferredOutputPixelFormat;
@@ -57,12 +58,12 @@ const int64_t kNanosecondsPerSecond = 1000000000;
return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
}
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]];
}
// This initializer is used for testing.
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate
- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
captureSession:(AVCaptureSession *)captureSession {
if (self = [super initWithDelegate:delegate]) {
// Create the capture session and all relevant inputs and outputs. We need
@@ -110,9 +111,9 @@ const int64_t kNanosecondsPerSecond = 1000000000;
}
- (void)dealloc {
NSAssert(
!_willBeRunning,
@"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
NSAssert(!_willBeRunning,
@"Session was still running in RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to "
@"call stopCapture?");
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
@@ -154,7 +155,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
fps:(NSInteger)fps
completionHandler:(nullable void (^)(NSError *))completionHandler {
_willBeRunning = YES;
[RTCDispatcher
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);
@@ -196,7 +197,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
_willBeRunning = NO;
[RTCDispatcher
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("Stop");
@@ -225,10 +226,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[self updateOrientation];
}];
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[self updateOrientation];
}];
}
#endif
@@ -287,12 +288,14 @@ const int64_t kNanosecondsPerSecond = 1000000000;
_rotation = RTCVideoRotation_0;
#endif
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
kNanosecondsPerSecond;
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
@@ -343,29 +346,29 @@ const int64_t kNanosecondsPerSecond = 1000000000;
NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
RTCLogError(@"Capture session runtime error: %@", error);
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
#if TARGET_OS_IPHONE
if (error.code == AVErrorMediaServicesWereReset) {
[self handleNonFatalError];
} else {
[self handleFatalError];
}
if (error.code == AVErrorMediaServicesWereReset) {
[self handleNonFatalError];
} else {
[self handleFatalError];
}
#else
[self handleFatalError];
[self handleFatalError];
#endif
}];
}];
}
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
RTCLog(@"Capture session started.");
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
// If we successfully restarted after an unknown error,
// allow future retries on fatal errors.
self.hasRetriedOnFatalError = NO;
}];
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
// If we successfully restarted after an unknown
// error, allow future retries on fatal errors.
self.hasRetriedOnFatalError = NO;
}];
}
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
@@ -373,7 +376,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
}
- (void)handleFatalError {
[RTCDispatcher
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (!self.hasRetriedOnFatalError) {
@@ -387,13 +390,13 @@
}
- (void)handleNonFatalError {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLog(@"Restarting capture session after error.");
if (self.isRunning) {
[self.captureSession startRunning];
}
}];
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLog(@"Restarting capture session after error.");
if (self.isRunning) {
[self.captureSession startRunning];
}
}];
}
#if TARGET_OS_IPHONE
@@ -401,13 +404,14 @@
#pragma mark - UIApplication notifications
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (self.isRunning && !self.captureSession.isRunning) {
RTCLog(@"Restarting capture session on active.");
[self.captureSession startRunning];
}
}];
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (self.isRunning && !self.captureSession.isRunning) {
RTCLog(@"Restarting capture session on active.");
[self.captureSession startRunning];
}
}];
}
#endif // TARGET_OS_IPHONE
@@ -448,7 +452,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
// `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
// device with the most efficient output format first. Find the first format that we support.
NSSet<NSNumber *> *supportedPixelFormats = [RTCCVPixelBuffer supportedPixelFormats];
NSSet<NSNumber *> *supportedPixelFormats =
[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats];
NSMutableOrderedSet *availablePixelFormats =
[NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
[availablePixelFormats intersectSet:supportedPixelFormats];
@@ -465,7 +470,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) {
if (![[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats] containsObject:@(mediaSubType)]) {
mediaSubType = _preferredOutputPixelFormat;
}
@@ -479,7 +484,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
#pragma mark - Private, called inside capture queue
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateDeviceCaptureFormat must be called on the capture queue.");
@try {
_currentDevice.activeFormat = format;
@@ -491,7 +496,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
}
- (void)reconfigureCaptureSessionInput {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"reconfigureCaptureSessionInput must be called on the capture queue.");
NSError *error = nil;
AVCaptureDeviceInput *input =
@@ -513,7 +518,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
}
- (void)updateOrientation {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateOrientation must be called on the capture queue.");
#if TARGET_OS_IPHONE
_orientation = [UIDevice currentDevice].orientation;

@@ -27,7 +27,7 @@ typedef void (^RTCFileVideoCapturerErrorBlock)(NSError *error);
RTC_OBJC_EXPORT
NS_CLASS_AVAILABLE_IOS(10)
@interface RTCFileVideoCapturer : RTCVideoCapturer
@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer)
/**
* Starts asynchronous capture of frames from video file.

@@ -15,7 +15,8 @@
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#include "rtc_base/system/gcd_helpers.h"
NSString *const kRTCFileVideoCapturerErrorDomain = @"org.webrtc.RTCFileVideoCapturer";
NSString *const kRTCFileVideoCapturerErrorDomain =
@"org.webrtc.RTC_OBJC_TYPE(RTCFileVideoCapturer)";
typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) {
RTCFileVideoCapturerErrorCode_CapturerRunning = 2000,
@@ -28,12 +29,12 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
RTCFileVideoCapturerStatusStopped
};
@interface RTCFileVideoCapturer ()
@property(nonatomic, assign) CMTime lastPresentationTime;
@interface RTC_OBJC_TYPE (RTCFileVideoCapturer)
() @property(nonatomic, assign) CMTime lastPresentationTime;
@property(nonatomic, strong) NSURL *fileURL;
@end
@implementation RTCFileVideoCapturer {
@implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) {
AVAssetReader *_reader;
AVAssetReaderTrackOutput *_outTrack;
RTCFileVideoCapturerStatus _status;
@@ -182,11 +183,14 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
return;
}
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
NSTimeInterval timeStampSeconds = CACurrentMediaTime();
int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
RTCVideoFrame *videoFrame =
[[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs];
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
rotation:0
timeStampNs:timeStampNs];
CFRelease(sampleBuffer);
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{

@@ -97,7 +97,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
cropHeight:(nonnull int *)cropHeight
cropX:(nonnull int *)cropX
cropY:(nonnull int *)cropY
ofFrame:(nonnull RTCVideoFrame *)frame {
ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
*width = frame.width;
*height = frame.height;
*cropWidth = frame.width;
@@ -106,7 +106,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
*cropY = 0;
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
if (![super setupTexturesForFrame:frame]) {
return NO;
}
@@ -116,7 +116,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
return NO;
}
id<RTCI420Buffer> buffer = [frame.buffer toI420];
id<RTC_OBJC_TYPE(RTCI420Buffer)> buffer = [frame.buffer toI420];
// Luma (y) texture.
if (!_descriptor || _width != frame.width || _height != frame.height) {

@@ -15,9 +15,9 @@
NS_AVAILABLE_MAC(10.11)
RTC_OBJC_EXPORT
@interface RTCMTLNSVideoView : NSView <RTCVideoRenderer>
@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView <RTC_OBJC_TYPE(RTCVideoRenderer)>
@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
+ (BOOL)isMetalAvailable;

@@ -17,13 +17,13 @@
#import "RTCMTLI420Renderer.h"
@interface RTCMTLNSVideoView ()<MTKViewDelegate>
@property(nonatomic) id<RTCMTLRenderer> renderer;
@interface RTC_OBJC_TYPE (RTCMTLNSVideoView)
()<MTKViewDelegate> @property(nonatomic) id<RTCMTLRenderer> renderer;
@property(nonatomic, strong) MTKView *metalView;
@property(atomic, strong) RTCVideoFrame *videoFrame;
@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
@end
@implementation RTCMTLNSVideoView {
@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) {
id<RTCMTLRenderer> _renderer;
}
@@ -102,7 +102,7 @@
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
}
#pragma mark - RTCVideoRenderer
#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
- (void)setSize:(CGSize)size {
_metalView.drawableSize = size;
@@ -112,7 +112,7 @@
[_metalView draw];
}
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
if (frame == nil) {
return;
}

@@ -95,8 +95,8 @@ static NSString *const shaderSource = MTL_STRINGIFY(
cropHeight:(nonnull int *)cropHeight
cropX:(nonnull int *)cropX
cropY:(nonnull int *)cropY
ofFrame:(nonnull RTCVideoFrame *)frame {
RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
*width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
*height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
*cropWidth = pixelBuffer.cropWidth;
@@ -105,12 +105,12 @@ static NSString *const shaderSource = MTL_STRINGIFY(
*cropY = pixelBuffer.cropY;
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
if (![super setupTexturesForFrame:frame]) {
return NO;
}
CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
id<MTLTexture> lumaTexture = nil;
id<MTLTexture> chromaTexture = nil;

@@ -93,8 +93,8 @@ static NSString *const shaderSource = MTL_STRINGIFY(
cropHeight:(nonnull int *)cropHeight
cropX:(nonnull int *)cropX
cropY:(nonnull int *)cropY
ofFrame:(nonnull RTCVideoFrame *)frame {
RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
*width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
*height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
*cropWidth = pixelBuffer.cropWidth;
@@ -103,12 +103,12 @@ static NSString *const shaderSource = MTL_STRINGIFY(
*cropY = pixelBuffer.cropY;
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
if (![super setupTexturesForFrame:frame]) {
return NO;
}
CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
id<MTLTexture> gpuTexture = nil;
CVMetalTextureRef textureOut = nullptr;

@@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN
@interface RTCMTLRenderer (Private)
- (nullable id<MTLDevice>)currentMetalDevice;
- (NSString *)shaderSource;
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame;
- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder;
- (void)getWidth:(nonnull int *)width
height:(nonnull int *)height
@@ -27,7 +27,7 @@ NS_ASSUME_NONNULL_BEGIN
cropHeight:(nonnull int *)cropHeight
cropX:(nonnull int *)cropX
cropY:(nonnull int *)cropY
ofFrame:(nonnull RTCVideoFrame *)frame;
ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
@end
NS_ASSUME_NONNULL_END

@@ -28,7 +28,7 @@ NS_ASSUME_NONNULL_BEGIN
*
* @param frame The frame to be rendered.
*/
- (void)drawFrame:(RTCVideoFrame *)frame;
- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
/**
* Sets the provided view as rendering destination if possible.

@@ -167,11 +167,11 @@ static const NSInteger kMaxInflightBuffers = 1;
cropHeight:(int *)cropHeight
cropX:(int *)cropX
cropY:(int *)cropY
ofFrame:(nonnull RTCVideoFrame *)frame {
ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
// Apply rotation override if set.
RTCVideoRotation rotation;
NSValue *rotationOverride = self.rotationOverride;
@@ -311,7 +311,7 @@ static const NSInteger kMaxInflightBuffers = 1;
#pragma mark - RTCMTLRenderer
- (void)drawFrame:(RTCVideoFrame *)frame {
- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
@autoreleasepool {
// Wait until the inflight (curently sent to GPU) command buffer
// has completed the GPU work.

@@ -27,9 +27,9 @@ NS_ASSUME_NONNULL_BEGIN
NS_CLASS_AVAILABLE_IOS(9)
RTC_OBJC_EXPORT
@interface RTCMTLVideoView : UIView<RTCVideoRenderer>
@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView<RTC_OBJC_TYPE(RTCVideoRenderer)>
@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
@property(nonatomic) UIViewContentMode videoContentMode;

@@ -29,17 +29,17 @@
#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
@interface RTCMTLVideoView () <MTKViewDelegate>
@property(nonatomic) RTCMTLI420Renderer *rendererI420;
@interface RTC_OBJC_TYPE (RTCMTLVideoView)
()<MTKViewDelegate> @property(nonatomic) RTCMTLI420Renderer *rendererI420;
@property(nonatomic) RTCMTLNV12Renderer *rendererNV12;
@property(nonatomic) RTCMTLRGBRenderer *rendererRGB;
@property(nonatomic) MTKView *metalView;
@property(atomic) RTCVideoFrame *videoFrame;
@property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
@property(nonatomic) CGSize videoFrameSize;
@property(nonatomic) int64_t lastFrameTimeNs;
@end
@implementation RTCMTLVideoView
@implementation RTC_OBJC_TYPE (RTCMTLVideoView)
@synthesize delegate = _delegate;
@synthesize rendererI420 = _rendererI420;
@@ -110,9 +110,10 @@
}
- (void)configure {
NSAssert([RTCMTLVideoView isMetalAvailable], @"Metal not available on this device");
NSAssert([RTC_OBJC_TYPE(RTCMTLVideoView) isMetalAvailable],
@"Metal not available on this device");
self.metalView = [RTCMTLVideoView createMetalView:self.bounds];
self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds];
self.metalView.delegate = self;
self.metalView.contentMode = UIViewContentModeScaleAspectFill;
[self addSubview:self.metalView];
@@ -140,7 +141,7 @@
- (void)drawInMTKView:(nonnull MTKView *)view {
NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
RTCVideoFrame *videoFrame = self.videoFrame;
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = self.videoFrame;
// Skip rendering if we've already rendered this frame.
if (!videoFrame || videoFrame.timeStampNs == self.lastFrameTimeNs) {
return;
@@ -151,12 +152,12 @@
}
RTCMTLRenderer *renderer;
if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer;
const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
if (!self.rendererRGB) {
self.rendererRGB = [RTCMTLVideoView createRGBRenderer];
self.rendererRGB = [RTC_OBJC_TYPE(RTCMTLVideoView) createRGBRenderer];
if (![self.rendererRGB addRenderingDestination:self.metalView]) {
self.rendererRGB = nil;
RTCLogError(@"Failed to create RGB renderer");
@@ -166,7 +167,7 @@
renderer = self.rendererRGB;
} else {
if (!self.rendererNV12) {
self.rendererNV12 = [RTCMTLVideoView createNV12Renderer];
self.rendererNV12 = [RTC_OBJC_TYPE(RTCMTLVideoView) createNV12Renderer];
if (![self.rendererNV12 addRenderingDestination:self.metalView]) {
self.rendererNV12 = nil;
RTCLogError(@"Failed to create NV12 renderer");
@@ -177,7 +178,7 @@
}
} else {
if (!self.rendererI420) {
self.rendererI420 = [RTCMTLVideoView createI420Renderer];
self.rendererI420 = [RTC_OBJC_TYPE(RTCMTLVideoView) createI420Renderer];
if (![self.rendererI420 addRenderingDestination:self.metalView]) {
self.rendererI420 = nil;
RTCLogError(@"Failed to create I420 renderer");
@@ -236,12 +237,12 @@
}
}
#pragma mark - RTCVideoRenderer
#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
- (void)setSize:(CGSize)size {
__weak RTCMTLVideoView *weakSelf = self;
__weak RTC_OBJC_TYPE(RTCMTLVideoView) *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
RTCMTLVideoView *strongSelf = weakSelf;
RTC_OBJC_TYPE(RTCMTLVideoView) *strongSelf = weakSelf;
strongSelf.videoFrameSize = size;
CGSize drawableSize = [strongSelf drawableSize];
@@ -252,7 +253,7 @@
});
}
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
if (!self.isEnabled) {
return;
}

@@ -12,11 +12,11 @@
NS_ASSUME_NONNULL_BEGIN
/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView and
* RTCEAGLVideoView if no external shader is specified. This shader will render
/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView
* and RTCEAGLVideoView if no external shader is specified. This shader will render
* the video in a rectangle without any color or geometric transformations.
*/
@interface RTCDefaultShader : NSObject<RTCVideoViewShading>
@interface RTCDefaultShader : NSObject <RTC_OBJC_TYPE (RTCVideoViewShading)>
@end

@@ -17,23 +17,25 @@
NS_ASSUME_NONNULL_BEGIN
@class RTCEAGLVideoView;
@class RTC_OBJC_TYPE(RTCEAGLVideoView);
/**
* RTCEAGLVideoView is an RTCVideoRenderer which renders video frames in its
* bounds using OpenGLES 2.0 or OpenGLES 3.0.
* RTCEAGLVideoView is an RTCVideoRenderer which renders video frames
* in its bounds using OpenGLES 2.0 or OpenGLES 3.0.
*/
RTC_OBJC_EXPORT
NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.")
@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>
@interface RTC_OBJC_TYPE (RTCEAGLVideoView) : UIView <RTC_OBJC_TYPE(RTCVideoRenderer)>
@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
- (instancetype)initWithFrame:(CGRect)frame
shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
NS_DESIGNATED_INITIALIZER;
- (instancetype)initWithCoder:(NSCoder *)aDecoder
shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
NS_DESIGNATED_INITIALIZER;
/** @abstract Wrapped RTCVideoRotation, or nil.
*/

@@ -21,7 +21,7 @@
#import "base/RTCVideoFrameBuffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
// RTCEAGLVideoView wraps a GLKView which is setup with
// RTC_OBJC_TYPE(RTCEAGLVideoView) wraps a GLKView which is setup with
// enableSetNeedsDisplay = NO for the purpose of gaining control of
// exactly when to call -[GLKView display]. This need for extra
// control is required to avoid triggering method calls on GLKView
@@ -30,23 +30,24 @@
// error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is
// the method that will trigger the binding of the render
// buffer. Because the standard behaviour of -[UIView setNeedsDisplay]
// is disabled for the reasons above, the RTCEAGLVideoView maintains
// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView) maintains
// its own |isDirty| flag.
@interface RTCEAGLVideoView () <GLKViewDelegate>
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCVideoFrame *videoFrame;
@interface RTC_OBJC_TYPE (RTCEAGLVideoView)
()<GLKViewDelegate>
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
@property(nonatomic, readonly) GLKView *glkView;
@end
@implementation RTCEAGLVideoView {
@implementation RTC_OBJC_TYPE (RTCEAGLVideoView) {
RTCDisplayLinkTimer *_timer;
EAGLContext *_glContext;
// This flag should only be set and read on the main thread (e.g. by
// setNeedsDisplay)
BOOL _isDirty;
id<RTCVideoViewShading> _shader;
id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
RTCNV12TextureCache *_nv12TextureCache;
RTCI420TextureCache *_i420TextureCache;
// As timestamps should be unique between frames, will store last
@@ -67,7 +68,7 @@
return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]];
}
- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTCVideoViewShading>)shader {
- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
if (self = [super initWithFrame:frame]) {
_shader = shader;
if (![self configure]) {
@@ -77,7 +78,8 @@
return self;
}
- (instancetype)initWithCoder:(NSCoder *)aDecoder shader:(id<RTCVideoViewShading>)shader {
- (instancetype)initWithCoder:(NSCoder *)aDecoder
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
if (self = [super initWithCoder:aDecoder]) {
_shader = shader;
if (![self configure]) {
@@ -127,11 +129,11 @@
// Frames are received on a separate thread, so we poll for current frame
// using a refresh rate proportional to screen refresh frequency. This
// occurs on the main thread.
__weak RTCEAGLVideoView *weakSelf = self;
__weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self;
_timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
RTCEAGLVideoView *strongSelf = weakSelf;
[strongSelf displayLinkTimerDidFire];
}];
RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
[strongSelf displayLinkTimerDidFire];
}];
if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) {
[self setupGL];
}
@@ -182,7 +184,7 @@
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
// The renderer will draw the frame to the framebuffer corresponding to the
// one used by |view|.
RTCVideoFrame *frame = self.videoFrame;
RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
if (!frame || frame.timeStampNs == _lastDrawnFrameTimeStampNs) {
return;
}
@@ -192,7 +194,7 @@
}
[self ensureGLContext];
glClear(GL_COLOR_BUFFER_BIT);
if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
if (!_nv12TextureCache) {
_nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
}
@@ -223,18 +225,18 @@
}
}
#pragma mark - RTCVideoRenderer
#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
// These methods may be called on non-main thread.
- (void)setSize:(CGSize)size {
__weak RTCEAGLVideoView *weakSelf = self;
__weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
RTCEAGLVideoView *strongSelf = weakSelf;
RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
[strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
});
}
- (void)renderFrame:(RTCVideoFrame *)frame {
- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
self.videoFrame = frame;
}

@@ -20,6 +20,6 @@
- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
- (void)uploadFrameToTextures:(RTCVideoFrame *)frame;
- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
@end

@@ -123,10 +123,10 @@ static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
uploadPlane);
}
- (void)uploadFrameToTextures:(RTCVideoFrame *)frame {
- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
_currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
id<RTCI420Buffer> buffer = [frame.buffer toI420];
id<RTC_OBJC_TYPE(RTCI420Buffer)> buffer = [frame.buffer toI420];
const int chromaWidth = buffer.chromaWidth;
const int chromaHeight = buffer.chromaHeight;

@@ -19,20 +19,21 @@
NS_ASSUME_NONNULL_BEGIN
@class RTCNSGLVideoView;
@class RTC_OBJC_TYPE(RTCNSGLVideoView);
RTC_OBJC_EXPORT
@protocol RTCNSGLVideoViewDelegate <RTCVideoViewDelegate>
@end
@protocol RTC_OBJC_TYPE
(RTCNSGLVideoViewDelegate)<RTC_OBJC_TYPE(RTCVideoViewDelegate)> @end
RTC_OBJC_EXPORT
@interface RTCNSGLVideoView : NSOpenGLView <RTCVideoRenderer>
@interface RTC_OBJC_TYPE (RTCNSGLVideoView) : NSOpenGLView <RTC_OBJC_TYPE(RTCVideoRenderer)>
@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
- (instancetype)initWithFrame:(NSRect)frameRect
pixelFormat:(NSOpenGLPixelFormat *)format
shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
NS_DESIGNATED_INITIALIZER;
@end

@@ -23,10 +23,12 @@
#import "base/RTCLogging.h"
#import "base/RTCVideoFrame.h"
@interface RTCNSGLVideoView ()
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCVideoFrame *videoFrame;
@interface RTC_OBJC_TYPE (RTCNSGLVideoView)
()
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *
videoFrame;
@property(atomic, strong) RTCI420TextureCache *i420TextureCache;
- (void)drawFrame;
@@ -38,15 +40,16 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
CVOptionFlags flagsIn,
CVOptionFlags *flagsOut,
void *displayLinkContext) {
RTCNSGLVideoView *view = (__bridge RTCNSGLVideoView *)displayLinkContext;
RTC_OBJC_TYPE(RTCNSGLVideoView) *view =
(__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext;
[view drawFrame];
return kCVReturnSuccess;
}
@implementation RTCNSGLVideoView {
@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) {
CVDisplayLinkRef _displayLink;
RTCVideoFrame *_lastDrawnFrame;
id<RTCVideoViewShading> _shader;
RTC_OBJC_TYPE(RTCVideoFrame) * _lastDrawnFrame;
id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
}
@synthesize delegate = _delegate;
@@ -59,7 +62,7 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
- (instancetype)initWithFrame:(NSRect)frame
pixelFormat:(NSOpenGLPixelFormat *)format
shader:(id<RTCVideoViewShading>)shader {
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
if (self = [super initWithFrame:frame pixelFormat:format]) {
_shader = shader;
}
@@ -105,7 +108,7 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
[super clearGLContext];
}
#pragma mark - RTCVideoRenderer
#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
// These methods may be called on non-main thread.
- (void)setSize:(CGSize)size {
@@ -114,14 +117,14 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
});
}
- (void)renderFrame:(RTCVideoFrame *)frame {
- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
self.videoFrame = frame;
}
#pragma mark - Private
- (void)drawFrame {
RTCVideoFrame *frame = self.videoFrame;
RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
if (!frame || frame == _lastDrawnFrame) {
return;
}

@@ -10,7 +10,9 @@
#import <GLKit/GLKit.h>
@class RTCVideoFrame;
#import "base/RTCMacros.h"
@class RTC_OBJC_TYPE(RTCVideoFrame);
NS_ASSUME_NONNULL_BEGIN
@@ -22,7 +24,7 @@ NS_ASSUME_NONNULL_BEGIN
- (instancetype)init NS_UNAVAILABLE;
- (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER;
- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame;
- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
- (void)releaseTextures;

@@ -76,10 +76,10 @@
return YES;
}
- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame {
NSAssert([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]],
- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
NSAssert([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]],
@"frame must be CVPixelBuffer backed");
RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
return [self loadTexture:&_yTextureRef
pixelBuffer:pixelBuffer

@@ -15,19 +15,17 @@
NS_ASSUME_NONNULL_BEGIN
/**
* RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders used in
* rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
* RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders
* used in rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
*/
RTC_OBJC_EXPORT
@protocol RTCVideoViewShading <NSObject>
@protocol RTC_OBJC_TYPE
(RTCVideoViewShading)<NSObject>
/** Callback for I420 frames. Each plane is given as a texture. */
- (void)applyShadingForFrameWithWidth:(int)width
height:(int)height
rotation:(RTCVideoRotation)rotation
yPlane:(GLuint)yPlane
uPlane:(GLuint)uPlane
vPlane:(GLuint)vPlane;
/** Callback for I420 frames. Each plane is given as a texture. */
- (void)applyShadingForFrameWithWidth : (int)width height : (int)height rotation
: (RTCVideoRotation)rotation yPlane : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane
: (GLuint)vPlane;
/** Callback for NV12 frames. Each plane is given as a texture. */
- (void)applyShadingForFrameWithWidth:(int)width

@@ -15,9 +15,10 @@
NS_ASSUME_NONNULL_BEGIN
/* Interfaces for converting to/from internal C++ formats. */
@interface RTCCodecSpecificInfoH264 ()
@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264)
()
- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo;
- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo;
@end

View File

@ -20,7 +20,7 @@ typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) {
};
RTC_OBJC_EXPORT
@interface RTCCodecSpecificInfoH264 : NSObject <RTCCodecSpecificInfo>
@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) : NSObject <RTC_OBJC_TYPE(RTCCodecSpecificInfo)>
@property(nonatomic, assign) RTCH264PacketizationMode packetizationMode;

View File

@ -13,7 +13,7 @@
#import "RTCH264ProfileLevelId.h"
// H264 specific settings.
@implementation RTCCodecSpecificInfoH264
@implementation RTC_OBJC_TYPE (RTCCodecSpecificInfoH264)
@synthesize packetizationMode = _packetizationMode;

View File

@ -16,10 +16,11 @@
NS_ASSUME_NONNULL_BEGIN
/** This decoder factory includes support for all codecs bundled with WebRTC. If using custom
* codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory.
* codecs, create custom implementations of RTCVideoEncoderFactory and
* RTCVideoDecoderFactory.
*/
RTC_OBJC_EXPORT
@interface RTCDefaultVideoDecoderFactory : NSObject <RTCVideoDecoderFactory>
@interface RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
@end
NS_ASSUME_NONNULL_END
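As a usage sketch (the helper name CreateDefaultDecoderFactory is hypothetical), the renamed factory is instantiated through the macro like any other wrapped type:

static id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateDefaultDecoderFactory(void) {
  // supportedCodecs covers H264 (constrained high/baseline), VP8 and, when
  // RTC_ENABLE_VP9 is defined, VP9, as the .m change below shows.
  return [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init];
}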

View File

@ -19,31 +19,33 @@
#import "api/video_codec/RTCVideoDecoderVP9.h"
#endif
@implementation RTCDefaultVideoDecoderFactory
@implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory)
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedHighInfo =
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedBaselineInfo =
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
#if defined(RTC_ENABLE_VP9)
RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp9Info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name];
#endif
return @[
@ -56,14 +58,14 @@
];
}
- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
return [[RTCVideoDecoderH264 alloc] init];
return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
} else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
return [RTCVideoDecoderVP8 vp8Decoder];
return [RTC_OBJC_TYPE(RTCVideoDecoderVP8) vp8Decoder];
#if defined(RTC_ENABLE_VP9)
} else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
return [RTCVideoDecoderVP9 vp9Decoder];
return [RTC_OBJC_TYPE(RTCVideoDecoderVP9) vp9Decoder];
#endif
}

View File

@ -16,14 +16,15 @@
NS_ASSUME_NONNULL_BEGIN
/** This encoder factory includes support for all codecs bundled with WebRTC. If using custom
* codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory.
* codecs, create custom implementations of RTCVideoEncoderFactory and
* RTCVideoDecoderFactory.
*/
RTC_OBJC_EXPORT
@interface RTCDefaultVideoEncoderFactory : NSObject <RTCVideoEncoderFactory>
@interface RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
@property(nonatomic, retain) RTCVideoCodecInfo *preferredCodec;
@property(nonatomic, retain) RTC_OBJC_TYPE(RTCVideoCodecInfo) *preferredCodec;
+ (NSArray<RTCVideoCodecInfo *> *)supportedCodecs;
+ (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs;
@end
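A short, hedged sketch of the preferredCodec knob declared above; the helper name is hypothetical, while kRTCVideoCodecVp8Name and initWithName: appear elsewhere in this CL:

static RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *CreateVp8PreferringEncoderFactory(void) {
  RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory =
      [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init];
  // The instance method -supportedCodecs moves this codec to the front of
  // the list (see the .m change below).
  encoderFactory.preferredCodec =
      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
  return encoderFactory;
}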

View File

@ -19,33 +19,35 @@
#import "api/video_codec/RTCVideoEncoderVP9.h"
#endif
@implementation RTCDefaultVideoEncoderFactory
@implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory)
@synthesize preferredCodec;
+ (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
+ (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedHighInfo =
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedBaselineInfo =
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
#if defined(RTC_ENABLE_VP9)
RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp9Info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name];
#endif
return @[
@ -58,24 +60,25 @@
];
}
- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
} else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
return [RTCVideoEncoderVP8 vp8Encoder];
return [RTC_OBJC_TYPE(RTCVideoEncoderVP8) vp8Encoder];
#if defined(RTC_ENABLE_VP9)
} else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
return [RTCVideoEncoderVP9 vp9Encoder];
return [RTC_OBJC_TYPE(RTCVideoEncoderVP9) vp9Encoder];
#endif
}
return nil;
}
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
NSMutableArray<RTCVideoCodecInfo *> *codecs = [[[self class] supportedCodecs] mutableCopy];
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
[[[self class] supportedCodecs] mutableCopy];
NSMutableArray<RTCVideoCodecInfo *> *orderedCodecs = [NSMutableArray array];
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *orderedCodecs = [NSMutableArray array];
NSUInteger index = [codecs indexOfObject:self.preferredCodec];
if (index != NSNotFound) {
[orderedCodecs addObject:[codecs objectAtIndex:index]];

View File

@ -48,7 +48,7 @@ typedef NS_ENUM(NSUInteger, RTCH264Level) {
};
RTC_OBJC_EXPORT
@interface RTCH264ProfileLevelId : NSObject
@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) : NSObject
@property(nonatomic, readonly) RTCH264Profile profile;
@property(nonatomic, readonly) RTCH264Level level;
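For illustration, a hedged sketch of inspecting a parsed profile-level-id. The initWithHexString: initializer is assumed from the full header, which this hunk truncates, and @"640c1f" denotes H264 High profile at level 3.1:

static BOOL RemoteSupportsHighProfile(NSString *fmtpProfileLevelId) {
  RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
      [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:fmtpProfileLevelId];
  return profileLevelId.profile == RTCH264ProfileHigh;
}

// e.g. RemoteSupportsHighProfile(@"640c1f")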

View File

@ -75,15 +75,16 @@ NSString *MaxSupportedProfileLevelConstrainedHigh() {
} // namespace
@interface RTCH264ProfileLevelId ()
@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId)
()
@property(nonatomic, assign) RTCH264Profile profile;
@property(nonatomic, assign) RTCH264Profile profile;
@property(nonatomic, assign) RTCH264Level level;
@property(nonatomic, strong) NSString *hexString;
@end
@implementation RTCH264ProfileLevelId
@implementation RTC_OBJC_TYPE (RTCH264ProfileLevelId)
@synthesize profile = _profile;
@synthesize level = _level;

View File

@ -14,5 +14,5 @@
#import "RTCVideoDecoderFactory.h"
RTC_OBJC_EXPORT
@interface RTCVideoDecoderFactoryH264 : NSObject <RTCVideoDecoderFactory>
@interface RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
@end

View File

@ -13,10 +13,10 @@
#import "RTCH264ProfileLevelId.h"
#import "RTCVideoDecoderH264.h"
@implementation RTCVideoDecoderFactoryH264
@implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264)
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
NSMutableArray<RTCVideoCodecInfo *> *codecs = [NSMutableArray array];
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
NSString *codecName = kRTCVideoCodecH264Name;
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@ -24,8 +24,9 @@
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedHighInfo =
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedHighParams];
[codecs addObject:constrainedHighInfo];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@ -33,15 +34,16 @@
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedBaselineInfo =
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedBaselineParams];
[codecs addObject:constrainedBaselineInfo];
return [codecs copy];
}
- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
return [[RTCVideoDecoderH264 alloc] init];
- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
}
@end
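An end-to-end sketch of the factory API after the rename (the helper name is hypothetical; every type and constant it uses appears in the hunk above):

static id<RTC_OBJC_TYPE(RTCVideoDecoder)> CreateH264Decoder(void) {
  RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) *factory =
      [[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init];
  RTC_OBJC_TYPE(RTCVideoCodecInfo) *h264Info =
      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name];
  return [factory createDecoder:h264Info];
}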

View File

@ -14,5 +14,5 @@
#import "RTCVideoDecoder.h"
RTC_OBJC_EXPORT
@interface RTCVideoDecoderH264 : NSObject <RTCVideoDecoder>
@interface RTC_OBJC_TYPE (RTCVideoDecoderH264) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoder)>
@end

View File

@ -37,8 +37,8 @@ struct RTCFrameDecodeParams {
int64_t timestamp;
};
@interface RTCVideoDecoderH264 ()
- (void)setError:(OSStatus)error;
@interface RTC_OBJC_TYPE (RTCVideoDecoderH264)
() - (void)setError : (OSStatus)error;
@end
// This is the callback function that VideoToolbox calls when decode is
@ -53,23 +53,25 @@ void decompressionOutputCallback(void *decoderRef,
std::unique_ptr<RTCFrameDecodeParams> decodeParams(
reinterpret_cast<RTCFrameDecodeParams *>(params));
if (status != noErr) {
RTCVideoDecoderH264 *decoder = (__bridge RTCVideoDecoderH264 *)decoderRef;
RTC_OBJC_TYPE(RTCVideoDecoderH264) *decoder =
(__bridge RTC_OBJC_TYPE(RTCVideoDecoderH264) *)decoderRef;
[decoder setError:status];
RTC_LOG(LS_ERROR) << "Failed to decode frame. Status: " << status;
return;
}
// TODO(tkchin): Handle CVO properly.
RTCCVPixelBuffer *frameBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:imageBuffer];
RTCVideoFrame *decodedFrame =
[[RTCVideoFrame alloc] initWithBuffer:frameBuffer
rotation:RTCVideoRotation_0
timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *frameBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:imageBuffer];
RTC_OBJC_TYPE(RTCVideoFrame) *decodedFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
initWithBuffer:frameBuffer
rotation:RTCVideoRotation_0
timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
decodedFrame.timeStamp = decodeParams->timestamp;
decodeParams->callback(decodedFrame);
}
// Decoder.
@implementation RTCVideoDecoderH264 {
@implementation RTC_OBJC_TYPE (RTCVideoDecoderH264) {
CMVideoFormatDescriptionRef _videoFormat;
CMMemoryPoolRef _memoryPool;
VTDecompressionSessionRef _decompressionSession;
@ -96,9 +98,9 @@ void decompressionOutputCallback(void *decoderRef,
return WEBRTC_VIDEO_CODEC_OK;
}
- (NSInteger)decode:(RTCEncodedImage *)inputImage
- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)inputImage
missingFrames:(BOOL)missingFrames
codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
renderTimeMs:(int64_t)renderTimeMs {
RTC_DCHECK(inputImage.buffer);

View File

@ -14,5 +14,5 @@
#import "RTCVideoEncoderFactory.h"
RTC_OBJC_EXPORT
@interface RTCVideoEncoderFactoryH264 : NSObject <RTCVideoEncoderFactory>
@interface RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
@end

View File

@ -13,10 +13,10 @@
#import "RTCH264ProfileLevelId.h"
#import "RTCVideoEncoderH264.h"
@implementation RTCVideoEncoderFactoryH264
@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264)
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
NSMutableArray<RTCVideoCodecInfo *> *codecs = [NSMutableArray array];
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
NSString *codecName = kRTCVideoCodecH264Name;
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@ -24,8 +24,9 @@
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedHighInfo =
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedHighParams];
[codecs addObject:constrainedHighInfo];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@ -33,15 +34,16 @@
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedBaselineInfo =
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedBaselineParams];
[codecs addObject:constrainedBaselineInfo];
return [codecs copy];
}
- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
}
@end

View File

@ -15,8 +15,8 @@
#import "RTCVideoEncoder.h"
RTC_OBJC_EXPORT
@interface RTCVideoEncoderH264 : NSObject <RTCVideoEncoder>
@interface RTC_OBJC_TYPE (RTCVideoEncoderH264) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoder)>
- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo;
- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo;
@end

View File

@ -40,17 +40,14 @@
#include "sdk/objc/components/video_codec/nalu_rewriter.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
@interface RTCVideoEncoderH264 ()
@interface RTC_OBJC_TYPE (RTCVideoEncoderH264)
()
- (void)frameWasEncoded:(OSStatus)status
flags:(VTEncodeInfoFlags)infoFlags
sampleBuffer:(CMSampleBufferRef)sampleBuffer
codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
width:(int32_t)width
height:(int32_t)height
renderTimeMs:(int64_t)renderTimeMs
timestamp:(uint32_t)timestamp
rotation:(RTCVideoRotation)rotation;
- (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags sampleBuffer
: (CMSampleBufferRef)sampleBuffer codecSpecificInfo
: (id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo width : (int32_t)width height
: (int32_t)height renderTimeMs : (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation
: (RTCVideoRotation)rotation;
@end
@ -70,8 +67,8 @@ const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
// Struct that we pass to the encoder per frame to encode. We receive it again
// in the encoder callback.
struct RTCFrameEncodeParams {
RTCFrameEncodeParams(RTCVideoEncoderH264 *e,
RTCCodecSpecificInfoH264 *csi,
RTCFrameEncodeParams(RTC_OBJC_TYPE(RTCVideoEncoderH264) * e,
RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * csi,
int32_t w,
int32_t h,
int64_t rtms,
@ -81,12 +78,12 @@ struct RTCFrameEncodeParams {
if (csi) {
codecSpecificInfo = csi;
} else {
codecSpecificInfo = [[RTCCodecSpecificInfoH264 alloc] init];
codecSpecificInfo = [[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) alloc] init];
}
}
RTCVideoEncoderH264 *encoder;
RTCCodecSpecificInfoH264 *codecSpecificInfo;
RTC_OBJC_TYPE(RTCVideoEncoderH264) * encoder;
RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * codecSpecificInfo;
int32_t width;
int32_t height;
int64_t render_time_ms;
@ -97,7 +94,8 @@ struct RTCFrameEncodeParams {
// We receive I420Frames as input, but we need to feed CVPixelBuffers into the
// encoder. This performs the copy and format conversion.
// TODO(tkchin): See if encoder will accept i420 frames and compare performance.
bool CopyVideoFrameToNV12PixelBuffer(id<RTCI420Buffer> frameBuffer, CVPixelBufferRef pixelBuffer) {
bool CopyVideoFrameToNV12PixelBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frameBuffer,
CVPixelBufferRef pixelBuffer) {
RTC_DCHECK(pixelBuffer);
RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer), kNV12PixelFormat);
RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height);
@ -313,8 +311,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
}
} // namespace
@implementation RTCVideoEncoderH264 {
RTCVideoCodecInfo *_codecInfo;
@implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) {
RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo;
std::unique_ptr<webrtc::BitrateAdjuster> _bitrateAdjuster;
uint32_t _targetBitrateBps;
uint32_t _encoderBitrateBps;
@ -340,7 +338,7 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
// drastically reduced bitrate, so we want to avoid that. In steady state
// conditions, 0.95 seems to give us better overall bitrate over long periods
// of time.
- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo {
- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo {
if (self = [super init]) {
_codecInfo = codecInfo;
_bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95));
@ -358,7 +356,7 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
[self destroyCompressionSession];
}
- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
numberOfCores:(int)numberOfCores {
RTC_DCHECK(settings);
RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]);
@ -388,8 +386,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
return [self resetCompressionSessionWithPixelFormat:kNV12PixelFormat];
}
- (NSInteger)encode:(RTCVideoFrame *)frame
codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)codecSpecificInfo
- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
frameTypes:(NSArray<NSNumber *> *)frameTypes {
RTC_DCHECK_EQ(frame.width, _width);
RTC_DCHECK_EQ(frame.height, _height);
@ -404,9 +402,10 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
}
CVPixelBufferRef pixelBuffer = nullptr;
if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
// Native frame buffer
RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
if (![rtcPixelBuffer requiresCropping]) {
// This pixel buffer might have a higher resolution than what the
// compression session is configured to. The compression session can
@ -543,17 +542,18 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
return WEBRTC_VIDEO_CODEC_OK;
}
- (OSType)pixelFormatOfFrame:(RTCVideoFrame *)frame {
- (OSType)pixelFormatOfFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
// Use NV12 for non-native frames.
if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
return CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer);
}
return kNV12PixelFormat;
}
- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTCVideoFrame *)frame {
- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
BOOL resetCompressionSession = NO;
// If we're capturing native frames in another pixel format than the compression session is
@ -755,7 +755,7 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
- (void)frameWasEncoded:(OSStatus)status
flags:(VTEncodeInfoFlags)infoFlags
sampleBuffer:(CMSampleBufferRef)sampleBuffer
codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
codecSpecificInfo:(id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
width:(int32_t)width
height:(int32_t)height
renderTimeMs:(int64_t)renderTimeMs
@ -783,18 +783,19 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
}
__block std::unique_ptr<rtc::Buffer> buffer = std::make_unique<rtc::Buffer>();
RTCRtpFragmentationHeader *header;
RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * header;
{
std::unique_ptr<webrtc::RTPFragmentationHeader> header_cpp;
bool result =
H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get(), &header_cpp);
header = [[RTCRtpFragmentationHeader alloc] initWithNativeFragmentationHeader:header_cpp.get()];
header = [[RTC_OBJC_TYPE(RTCRtpFragmentationHeader) alloc]
initWithNativeFragmentationHeader:header_cpp.get()];
if (!result) {
return;
}
}
RTCEncodedImage *frame = [[RTCEncodedImage alloc] init];
RTC_OBJC_TYPE(RTCEncodedImage) *frame = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
// This assumes ownership of `buffer` and is responsible for freeing it when done.
frame.buffer = [[NSData alloc] initWithBytesNoCopy:buffer->data()
length:buffer->size()
@ -825,9 +826,10 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
_bitrateAdjuster->Update(frame.buffer.length);
}
- (nullable RTCVideoEncoderQpThresholds *)scalingSettings {
return [[RTCVideoEncoderQpThresholds alloc] initWithThresholdsLow:kLowH264QpThreshold
high:kHighH264QpThreshold];
- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings {
return [[RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) alloc]
initWithThresholdsLow:kLowH264QpThreshold
high:kHighH264QpThreshold];
}
@end

View File

@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN
/** RTCVideoFrameBuffer containing a CVPixelBufferRef */
RTC_OBJC_EXPORT
@interface RTCCVPixelBuffer : NSObject <RTCVideoFrameBuffer>
@interface RTC_OBJC_TYPE (RTCCVPixelBuffer) : NSObject <RTC_OBJC_TYPE(RTCVideoFrameBuffer)>
@property(nonatomic, readonly) CVPixelBufferRef pixelBuffer;
@property(nonatomic, readonly) int cropX;
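For reference, a hedged sketch of wrapping a caller-supplied CVPixelBufferRef with the renamed buffer type and building a frame from it; the initializers are the same ones the RTCVideoDecoderH264 change above uses, and the helper name is hypothetical:

static RTC_OBJC_TYPE(RTCVideoFrame) *WrapPixelBuffer(CVPixelBufferRef pixelBuffer,
                                                     int64_t timeStampNs) {
  RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcBuffer =
      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
  return [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcBuffer
                                                     rotation:RTCVideoRotation_0
                                                  timeStampNs:timeStampNs];
}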

View File

@ -22,7 +22,7 @@
#import <VideoToolbox/VideoToolbox.h>
#endif
@implementation RTCCVPixelBuffer {
@implementation RTC_OBJC_TYPE (RTCCVPixelBuffer) {
int _width;
int _height;
int _bufferWidth;
@ -152,13 +152,13 @@
return YES;
}
- (id<RTCI420Buffer>)toI420 {
- (id<RTC_OBJC_TYPE(RTCI420Buffer)>)toI420 {
const OSType pixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
RTCMutableI420Buffer* i420Buffer =
[[RTCMutableI420Buffer alloc] initWithWidth:[self width] height:[self height]];
RTC_OBJC_TYPE(RTCMutableI420Buffer)* i420Buffer =
[[RTC_OBJC_TYPE(RTCMutableI420Buffer) alloc] initWithWidth:[self width] height:[self height]];
switch (pixelFormat) {
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: