Wrap WebRTC OBJC API types with RTC_OBJC_TYPE.

This CL introduced 2 new macros that affect the WebRTC OBJC API symbols:

- RTC_OBJC_TYPE_PREFIX:
  Macro used to prepend a prefix to the API types that are exported with
  RTC_OBJC_EXPORT.

  Clients can patch the definition of this macro locally and build
  WebRTC.framework with their own prefix in case symbol clashing is a
  problem.

  This macro must only be defined by changing the value in
  sdk/objc/base/RTCMacros.h and not via a compiler flag, to ensure
  it has a unique value.

- RTC_OBJC_TYPE:
  Macro used internally to reference API types. Declaring an API type
  without using this macro will not include the declared type in the
  set of types that will be affected by the configurable
  RTC_OBJC_TYPE_PREFIX.

Manual changes:
https://webrtc-review.googlesource.com/c/src/+/173781/5..10

The auto-generated changes in PS#5 have been done with:
https://webrtc-review.googlesource.com/c/src/+/174061.

Bug: None
Change-Id: I0d54ca94db764fb3b6cb4365873f79e14cd879b8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173781
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31153}
This commit is contained in:
Mirko Bonadei
2020-05-04 16:14:32 +02:00
committed by Commit Bot
parent ce1320cc4d
commit a81e9c82fc
303 changed files with 2534 additions and 2189 deletions

View File

@ -12,11 +12,11 @@
NS_ASSUME_NONNULL_BEGIN
/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView and
* RTCEAGLVideoView if no external shader is specified. This shader will render
/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView
* and RTCEAGLVideoView if no external shader is specified. This shader will render
* the video in a rectangle without any color or geometric transformations.
*/
@interface RTCDefaultShader : NSObject<RTCVideoViewShading>
@interface RTCDefaultShader : NSObject <RTC_OBJC_TYPE (RTCVideoViewShading)>
@end

View File

@ -17,23 +17,25 @@
NS_ASSUME_NONNULL_BEGIN
@class RTCEAGLVideoView;
@class RTC_OBJC_TYPE(RTCEAGLVideoView);
/**
* RTCEAGLVideoView is an RTCVideoRenderer which renders video frames in its
* bounds using OpenGLES 2.0 or OpenGLES 3.0.
* RTCEAGLVideoView is an RTCVideoRenderer which renders video frames
* in its bounds using OpenGLES 2.0 or OpenGLES 3.0.
*/
RTC_OBJC_EXPORT
NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.")
@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>
@interface RTC_OBJC_TYPE (RTCEAGLVideoView) : UIView <RTC_OBJC_TYPE(RTCVideoRenderer)>
@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
- (instancetype)initWithFrame:(CGRect)frame
shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
NS_DESIGNATED_INITIALIZER;
- (instancetype)initWithCoder:(NSCoder *)aDecoder
shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
NS_DESIGNATED_INITIALIZER;
/** @abstract Wrapped RTCVideoRotation, or nil.
*/

View File

@ -21,7 +21,7 @@
#import "base/RTCVideoFrameBuffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
// RTCEAGLVideoView wraps a GLKView which is setup with
// RTC_OBJC_TYPE(RTCEAGLVideoView) wraps a GLKView which is setup with
// enableSetNeedsDisplay = NO for the purpose of gaining control of
// exactly when to call -[GLKView display]. This need for extra
// control is required to avoid triggering method calls on GLKView
@ -30,23 +30,24 @@
// error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is
// the method that will trigger the binding of the render
// buffer. Because the standard behaviour of -[UIView setNeedsDisplay]
// is disabled for the reasons above, the RTCEAGLVideoView maintains
// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView) maintains
// its own |isDirty| flag.
@interface RTCEAGLVideoView () <GLKViewDelegate>
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCVideoFrame *videoFrame;
@interface RTC_OBJC_TYPE (RTCEAGLVideoView)
()<GLKViewDelegate>
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
@property(nonatomic, readonly) GLKView *glkView;
@end
@implementation RTCEAGLVideoView {
@implementation RTC_OBJC_TYPE (RTCEAGLVideoView) {
RTCDisplayLinkTimer *_timer;
EAGLContext *_glContext;
// This flag should only be set and read on the main thread (e.g. by
// setNeedsDisplay)
BOOL _isDirty;
id<RTCVideoViewShading> _shader;
id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
RTCNV12TextureCache *_nv12TextureCache;
RTCI420TextureCache *_i420TextureCache;
// As timestamps should be unique between frames, will store last
@ -67,7 +68,7 @@
return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]];
}
- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTCVideoViewShading>)shader {
- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
if (self = [super initWithFrame:frame]) {
_shader = shader;
if (![self configure]) {
@ -77,7 +78,8 @@
return self;
}
- (instancetype)initWithCoder:(NSCoder *)aDecoder shader:(id<RTCVideoViewShading>)shader {
- (instancetype)initWithCoder:(NSCoder *)aDecoder
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
if (self = [super initWithCoder:aDecoder]) {
_shader = shader;
if (![self configure]) {
@ -127,11 +129,11 @@
// Frames are received on a separate thread, so we poll for current frame
// using a refresh rate proportional to screen refresh frequency. This
// occurs on the main thread.
__weak RTCEAGLVideoView *weakSelf = self;
__weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self;
_timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
RTCEAGLVideoView *strongSelf = weakSelf;
[strongSelf displayLinkTimerDidFire];
}];
RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
[strongSelf displayLinkTimerDidFire];
}];
if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) {
[self setupGL];
}
@ -182,7 +184,7 @@
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
// The renderer will draw the frame to the framebuffer corresponding to the
// one used by |view|.
RTCVideoFrame *frame = self.videoFrame;
RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
if (!frame || frame.timeStampNs == _lastDrawnFrameTimeStampNs) {
return;
}
@ -192,7 +194,7 @@
}
[self ensureGLContext];
glClear(GL_COLOR_BUFFER_BIT);
if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
if (!_nv12TextureCache) {
_nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
}
@ -223,18 +225,18 @@
}
}
#pragma mark - RTCVideoRenderer
#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
// These methods may be called on non-main thread.
- (void)setSize:(CGSize)size {
__weak RTCEAGLVideoView *weakSelf = self;
__weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
RTCEAGLVideoView *strongSelf = weakSelf;
RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
[strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
});
}
- (void)renderFrame:(RTCVideoFrame *)frame {
- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
self.videoFrame = frame;
}

View File

@ -20,6 +20,6 @@
- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
- (void)uploadFrameToTextures:(RTCVideoFrame *)frame;
- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
@end

View File

@ -123,10 +123,10 @@ static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
uploadPlane);
}
- (void)uploadFrameToTextures:(RTCVideoFrame *)frame {
- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
_currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
id<RTCI420Buffer> buffer = [frame.buffer toI420];
id<RTC_OBJC_TYPE(RTCI420Buffer)> buffer = [frame.buffer toI420];
const int chromaWidth = buffer.chromaWidth;
const int chromaHeight = buffer.chromaHeight;

View File

@ -19,20 +19,21 @@
NS_ASSUME_NONNULL_BEGIN
@class RTCNSGLVideoView;
@class RTC_OBJC_TYPE(RTCNSGLVideoView);
RTC_OBJC_EXPORT
@protocol RTCNSGLVideoViewDelegate <RTCVideoViewDelegate>
@end
@protocol RTC_OBJC_TYPE
(RTCNSGLVideoViewDelegate)<RTC_OBJC_TYPE(RTCVideoViewDelegate)> @end
RTC_OBJC_EXPORT
@interface RTCNSGLVideoView : NSOpenGLView <RTCVideoRenderer>
@interface RTC_OBJC_TYPE (RTCNSGLVideoView) : NSOpenGLView <RTC_OBJC_TYPE(RTCVideoRenderer)>
@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
- (instancetype)initWithFrame:(NSRect)frameRect
pixelFormat:(NSOpenGLPixelFormat *)format
shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
NS_DESIGNATED_INITIALIZER;
@end

View File

@ -23,10 +23,12 @@
#import "base/RTCLogging.h"
#import "base/RTCVideoFrame.h"
@interface RTCNSGLVideoView ()
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCVideoFrame *videoFrame;
@interface RTC_OBJC_TYPE (RTCNSGLVideoView)
()
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *
videoFrame;
@property(atomic, strong) RTCI420TextureCache *i420TextureCache;
- (void)drawFrame;
@ -38,15 +40,16 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
CVOptionFlags flagsIn,
CVOptionFlags *flagsOut,
void *displayLinkContext) {
RTCNSGLVideoView *view = (__bridge RTCNSGLVideoView *)displayLinkContext;
RTC_OBJC_TYPE(RTCNSGLVideoView) *view =
(__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext;
[view drawFrame];
return kCVReturnSuccess;
}
@implementation RTCNSGLVideoView {
@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) {
CVDisplayLinkRef _displayLink;
RTCVideoFrame *_lastDrawnFrame;
id<RTCVideoViewShading> _shader;
RTC_OBJC_TYPE(RTCVideoFrame) * _lastDrawnFrame;
id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
}
@synthesize delegate = _delegate;
@ -59,7 +62,7 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
- (instancetype)initWithFrame:(NSRect)frame
pixelFormat:(NSOpenGLPixelFormat *)format
shader:(id<RTCVideoViewShading>)shader {
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
if (self = [super initWithFrame:frame pixelFormat:format]) {
_shader = shader;
}
@ -105,7 +108,7 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
[super clearGLContext];
}
#pragma mark - RTCVideoRenderer
#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
// These methods may be called on non-main thread.
- (void)setSize:(CGSize)size {
@ -114,14 +117,14 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
});
}
- (void)renderFrame:(RTCVideoFrame *)frame {
- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
self.videoFrame = frame;
}
#pragma mark - Private
- (void)drawFrame {
RTCVideoFrame *frame = self.videoFrame;
RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
if (!frame || frame == _lastDrawnFrame) {
return;
}

View File

@ -10,7 +10,9 @@
#import <GLKit/GLKit.h>
@class RTCVideoFrame;
#import "base/RTCMacros.h"
@class RTC_OBJC_TYPE(RTCVideoFrame);
NS_ASSUME_NONNULL_BEGIN
@ -22,7 +24,7 @@ NS_ASSUME_NONNULL_BEGIN
- (instancetype)init NS_UNAVAILABLE;
- (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER;
- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame;
- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
- (void)releaseTextures;

View File

@ -76,10 +76,10 @@
return YES;
}
- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame {
NSAssert([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]],
- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
NSAssert([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]],
@"frame must be CVPixelBuffer backed");
RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
return [self loadTexture:&_yTextureRef
pixelBuffer:pixelBuffer

View File

@ -15,19 +15,17 @@
NS_ASSUME_NONNULL_BEGIN
/**
* RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders used in
* rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
* RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders
* used in rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
*/
RTC_OBJC_EXPORT
@protocol RTCVideoViewShading <NSObject>
@protocol RTC_OBJC_TYPE
(RTCVideoViewShading)<NSObject>
/** Callback for I420 frames. Each plane is given as a texture. */
- (void)applyShadingForFrameWithWidth:(int)width
height:(int)height
rotation:(RTCVideoRotation)rotation
yPlane:(GLuint)yPlane
uPlane:(GLuint)uPlane
vPlane:(GLuint)vPlane;
/** Callback for I420 frames. Each plane is given as a texture. */
- (void)applyShadingForFrameWithWidth : (int)width height : (int)height rotation
: (RTCVideoRotation)rotation yPlane : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane
: (GLuint)vPlane;
/** Callback for NV12 frames. Each plane is given as a texture. */
- (void)applyShadingForFrameWithWidth:(int)width