Revert "Metal rendering should account for cropping."
This reverts commit fc4a9c933326cac2eb048eb507e63021c75e705e.

Reason for revert: Remote video is not showing in a video call.

Original change's description:
> Metal rendering should account for cropping.
>
> Also:
> - added a rotation override to allow ignoring frame rotation
> - fixed a couple of minor issues
> - made it possible to run the MTKView without the DisplayLink
>
> Bug: webrtc:9301
> Change-Id: Ia83c152d9b6d45d56ceb80d287b5d3eacfaebddd
> Reviewed-on: https://webrtc-review.googlesource.com/78282
> Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
> Reviewed-by: Anders Carlsson <andersc@webrtc.org>
> Commit-Queue: Peter Hanspers <peterhanspers@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#23452}

TBR=andersc@webrtc.org,kthelgason@webrtc.org,peterhanspers@webrtc.org

Change-Id: Iddf7793368531d2d7268c1ec138bb3a9874a4ab7
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:9301
Reviewed-on: https://webrtc-review.googlesource.com/80020
Reviewed-by: JT Teh <jtteh@webrtc.org>
Commit-Queue: JT Teh <jtteh@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23455}
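For context, the reverted change had added two knobs to RTCMTLVideoView that this revert removes again (see the public header hunk at the end of the diff): a useDisplayLink flag and an NSValue-wrapped rotationOverride. A minimal caller-side sketch of that now-reverted API, for illustration only:

    // Illustration of the *reverted* API; these properties no longer exist after this revert.
    RTCMTLVideoView *remoteView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
    remoteView.useDisplayLink = NO;  // draw on demand from -renderFrame: instead of a display link
    RTCVideoRotation rotation = RTCVideoRotation_90;
    remoteView.rotationOverride =
        [NSValue valueWithBytes:&rotation objCType:@encode(RTCVideoRotation)];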
@@ -43,9 +43,7 @@ static CGFloat const kStatusBarHeight = 20;
   if (self = [super initWithFrame:frame]) {
 
 #if defined(RTC_SUPPORTS_METAL)
-    RTCMTLVideoView *metalView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
-    metalView.useDisplayLink = NO;
-    _remoteVideoView = metalView;
+    _remoteVideoView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
 #else
     RTCEAGLVideoView *remoteView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
     remoteView.delegate = self;
@@ -90,9 +90,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
 }
 
 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
-  if (![super setupTexturesForFrame:frame]) {
-    return NO;
-  }
+  [super setupTexturesForFrame:frame];
 
   id<MTLDevice> device = [self currentMetalDevice];
   if (!device) {
@@ -89,9 +89,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
 
 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
   RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
-  if ([super setupTexturesForFrame:frame] == NO) {
-    return NO;
-  }
+  [super setupTexturesForFrame:frame];
   CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
 
   id<MTLTexture> lumaTexture = nil;
@@ -88,9 +88,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
 
 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
   RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
-  if ([super setupTexturesForFrame:frame] == NO) {
-    return NO;
-  }
+  [super setupTexturesForFrame:frame];
   CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
 
   id<MTLTexture> gpuTexture = nil;
@@ -123,9 +121,10 @@ static NSString *const shaderSource = MTL_STRINGIFY(
 
   if (gpuTexture != nil) {
     _texture = gpuTexture;
-    _uniformsBuffer = [[self currentMetalDevice] newBufferWithBytes:&isARGB
-                                                             length:sizeof(isARGB)
-                                                            options:MTLResourceStorageModePrivate];
+    _uniformsBuffer =
+        [[self currentMetalDevice] newBufferWithBytes:&isARGB
+                                               length:sizeof(isARGB)
+                                              options:MTLResourceCPUCacheModeDefaultCache];
     return YES;
   }
 
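The hunk above also moves the RGB renderer's isARGB uniform back into a default-cache (CPU-writable) Metal buffer. A standalone sketch of that restored allocation, with a hypothetical device variable standing in for [self currentMetalDevice]:

    // Sketch under assumptions: 'device' is any id<MTLDevice>; 'isARGB' mirrors the uniform flag.
    bool isARGB = true;
    id<MTLBuffer> uniforms =
        [device newBufferWithBytes:&isARGB
                            length:sizeof(isARGB)
                           options:MTLResourceCPUCacheModeDefaultCache];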
@@ -46,16 +46,10 @@ NS_ASSUME_NONNULL_BEGIN
 @end
 
 /**
- * Implementation of RTCMTLRenderer protocol.
+ * Implementation of RTCMTLRenderer protocol for rendering native nv12 video frames.
  */
 NS_AVAILABLE(10_11, 9_0)
 @interface RTCMTLRenderer : NSObject<RTCMTLRenderer>
-
-/** @abstract A wrapped RTCVideoRotation, or nil.
-  @discussion When not nil, the frame rotation is ignored when rendering.
- */
-@property(atomic, nullable) NSValue *rotationOverride;
-
 @end
 
 NS_ASSUME_NONNULL_END
@@ -15,7 +15,6 @@
 
 #import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
 
 #include "api/video/video_rotation.h"
 #include "rtc_base/checks.h"
@@ -29,57 +28,31 @@ static NSString *const commandBufferLabel = @"RTCCommandBuffer";
 static NSString *const renderEncoderLabel = @"RTCEncoder";
 static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
 
-// Computes the texture coordinates given rotation and cropping.
-static inline void getCubeVertexData(int cropX,
-                                     int cropY,
-                                     int cropWidth,
-                                     int cropHeight,
-                                     size_t frameWidth,
-                                     size_t frameHeight,
-                                     RTCVideoRotation rotation,
-                                     float *buffer) {
-  // The computed values are the adjusted texture coordinates, in [0..1].
-  // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're skipping 20% of the
-  // left/top edge.
-  // For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're skipping 20% of the
-  // right/bottom edge (i.e. keeping 80%).
-  float cropLeft = cropX / (float)frameWidth;
-  float cropRight = (cropX + cropWidth) / (float)frameWidth;
-  float cropTop = cropY / (float)frameHeight;
-  float cropBottom = (cropY + cropHeight) / (float)frameHeight;
+static const float cubeVertexData[64] = {
+    -1.0, -1.0, 0.0, 1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0,
 
-  // These arrays map the view coordinates to texture coordinates, taking cropping and rotation
-  // into account. The first two columns are view coordinates, the last two are texture coordinates.
+    // rotation = 90, offset = 16.
+    -1.0, -1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0,
+
+    // rotation = 180, offset = 32.
+    -1.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, 0.0, -1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0,
+
+    // rotation = 270, offset = 48.
+    -1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 1.0, -1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0,
+};
+
+static inline int offsetForRotation(RTCVideoRotation rotation) {
   switch (rotation) {
-    case RTCVideoRotation_0: {
-      float values[16] = {-1.0, -1.0, cropLeft, cropBottom,
-                          1.0, -1.0, cropRight, cropBottom,
-                          -1.0, 1.0, cropLeft, cropTop,
-                          1.0, 1.0, cropRight, cropTop};
-      memcpy(buffer, &values, sizeof(values));
-    } break;
-    case RTCVideoRotation_90: {
-      float values[16] = {-1.0, -1.0, cropRight, cropBottom,
-                          1.0, -1.0, cropRight, cropTop,
-                          -1.0, 1.0, cropLeft, cropBottom,
-                          1.0, 1.0, cropLeft, cropTop};
-      memcpy(buffer, &values, sizeof(values));
-    } break;
-    case RTCVideoRotation_180: {
-      float values[16] = {-1.0, -1.0, cropRight, cropTop,
-                          1.0, -1.0, cropLeft, cropTop,
-                          -1.0, 1.0, cropRight, cropBottom,
-                          1.0, 1.0, cropLeft, cropBottom};
-      memcpy(buffer, &values, sizeof(values));
-    } break;
-    case RTCVideoRotation_270: {
-      float values[16] = {-1.0, -1.0, cropLeft, cropTop,
-                          1.0, -1.0, cropLeft, cropBottom,
-                          -1.0, 1.0, cropRight, cropTop,
-                          1.0, 1.0, cropRight, cropBottom};
-      memcpy(buffer, &values, sizeof(values));
-    } break;
+    case RTCVideoRotation_0:
+      return 0;
+    case RTCVideoRotation_90:
+      return 16;
+    case RTCVideoRotation_180:
+      return 32;
+    case RTCVideoRotation_270:
+      return 48;
   }
+  return 0;
 }
 
 // The max number of command buffers in flight (submitted to GPU).
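As a worked example of the crop arithmetic being deleted above (the numbers are hypothetical): for a 1280x720 frame cropped to a centered 640x360 region, the removed getCubeVertexData would have produced

    float cropLeft   = 320 / (float)1280;          // 0.25
    float cropRight  = (320 + 640) / (float)1280;  // 0.75
    float cropTop    = 180 / (float)720;           // 0.25
    float cropBottom = (180 + 360) / (float)720;   // 0.75
    // i.e. with RTCVideoRotation_0 the quad samples the texture sub-rectangle
    // [0.25, 0.75] x [0.25, 0.75] rather than the full [0, 1] x [0, 1] used by
    // the restored static cubeVertexData table.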
@@ -102,20 +75,14 @@ static const NSInteger kMaxInflightBuffers = 1;
   // Buffers.
   id<MTLBuffer> _vertexBuffer;
 
-  // Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation.
-  size_t _oldFrameWidth;
-  size_t _oldFrameHeight;
-  int _oldCropWidth;
-  int _oldCropHeight;
-  int _oldCropX;
-  int _oldCropY;
-  RTCVideoRotation _oldRotation;
+  // RTC Frame parameters.
+  int _offset;
 }
 
-@synthesize rotationOverride = _rotationOverride;
-
 - (instancetype)init {
   if (self = [super init]) {
+    // _offset of 0 is equal to rotation of 0.
+    _offset = 0;
     _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
   }
 
@@ -131,22 +98,13 @@ static const NSInteger kMaxInflightBuffers = 1;
 - (BOOL)setupWithView:(__kindof MTKView *)view {
   BOOL success = NO;
   if ([self setupMetal]) {
-    _view = view;
-    view.device = _device;
-    view.preferredFramesPerSecond = 30;
-    view.autoResizeDrawable = NO;
-
-    float vertexBufferArray[16] = {0};
-    _vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
-                                         length:sizeof(vertexBufferArray)
-                                        options:MTLResourceCPUCacheModeWriteCombined];
-
+    [self setupView:view];
     [self loadAssets];
+    [self setupBuffers];
     success = YES;
   }
   return success;
 }
 
 #pragma mark - Inheritance
 
 - (id<MTLDevice>)currentMetalDevice {
@@ -163,47 +121,7 @@ static const NSInteger kMaxInflightBuffers = 1;
 }
 
 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
-  // Apply rotation override if set.
-  RTCVideoRotation rotation;
-  NSValue *rotationOverride = self.rotationOverride;
-  if (rotationOverride) {
-#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
-    if (@available(iOS 11, *)) {
-      [rotationOverride getValue:&rotation size:sizeof(rotation)];
-    } else
-#endif
-    {
-      [rotationOverride getValue:&rotation];
-    }
-  } else {
-    rotation = frame.rotation;
-  }
-
-  RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
-  size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
-  size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
-
-  // Recompute the texture cropping and recreate vertexBuffer if necessary.
-  if (pixelBuffer.cropX != _oldCropX || pixelBuffer.cropY != _oldCropY ||
-      pixelBuffer.cropWidth != _oldCropWidth || pixelBuffer.cropHeight != _oldCropHeight ||
-      rotation != _oldRotation || frameWidth != _oldFrameWidth || frameHeight != _oldFrameHeight) {
-    getCubeVertexData(pixelBuffer.cropX,
-                      pixelBuffer.cropY,
-                      pixelBuffer.cropWidth,
-                      pixelBuffer.cropHeight,
-                      frameWidth,
-                      frameHeight,
-                      rotation,
-                      (float *)_vertexBuffer.contents);
-    _oldCropX = pixelBuffer.cropX;
-    _oldCropY = pixelBuffer.cropY;
-    _oldCropWidth = pixelBuffer.cropWidth;
-    _oldCropHeight = pixelBuffer.cropHeight;
-    _oldRotation = rotation;
-    _oldFrameWidth = frameWidth;
-    _oldFrameHeight = frameHeight;
-  }
-
+  _offset = offsetForRotation(frame.rotation);
   return YES;
 }
 
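The deleted override handling above reads the rotation out of an NSValue with -getValue:size: on iOS 11 and later and with plain -getValue: otherwise. A self-contained sketch of the matching wrap/unwrap pair, assuming the value was created with @encode(RTCVideoRotation):

    // Producer side (sketch):
    RTCVideoRotation override = RTCVideoRotation_180;
    NSValue *wrapped = [NSValue valueWithBytes:&override objCType:@encode(RTCVideoRotation)];

    // Consumer side, mirroring the deleted code path:
    RTCVideoRotation rotation;
    if (@available(iOS 11, *)) {
      [wrapped getValue:&rotation size:sizeof(rotation)];  // length-checked variant
    } else {
      [wrapped getValue:&rotation];
    }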
@@ -240,6 +158,16 @@
   return YES;
 }
 
+- (void)setupView:(__kindof MTKView *)view {
+  view.device = _device;
+
+  view.preferredFramesPerSecond = 30;
+  view.autoResizeDrawable = NO;
+
+  // We need to keep reference to the view as it's needed down the rendering pipeline.
+  _view = view;
+}
+
 - (void)loadAssets {
   id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
   id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
@@ -258,6 +186,12 @@
   }
 }
 
+- (void)setupBuffers {
+  _vertexBuffer = [_device newBufferWithBytes:cubeVertexData
+                                       length:sizeof(cubeVertexData)
+                                      options:MTLResourceOptionCPUCacheModeDefault];
+}
+
 - (void)render {
   // Wait until the inflight (curently sent to GPU) command buffer
   // has completed the GPU work.
@@ -281,8 +215,7 @@
   // Set context state.
   [renderEncoder pushDebugGroup:renderEncoderDebugGroup];
   [renderEncoder setRenderPipelineState:_pipelineState];
-  [renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
+  [renderEncoder setVertexBuffer:_vertexBuffer offset:_offset * sizeof(float) atIndex:0];
   [self uploadTexturesToRenderEncoder:renderEncoder];
 
   [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
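The restored draw call above selects one of the four 16-float blocks in cubeVertexData by byte offset. A small worked example of that arithmetic (sketch; renderEncoder and _vertexBuffer as in the restored code):

    int offset = offsetForRotation(RTCVideoRotation_180);  // 32 (in floats)
    NSUInteger byteOffset = offset * sizeof(float);         // 32 * 4 = 128 bytes
    [renderEncoder setVertexBuffer:_vertexBuffer offset:byteOffset atIndex:0];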
@@ -29,16 +29,17 @@
 #define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
 
 @interface RTCMTLVideoView () <MTKViewDelegate>
-@property(nonatomic) RTCMTLI420Renderer *rendererI420;
-@property(nonatomic) RTCMTLNV12Renderer *rendererNV12;
-@property(nonatomic) RTCMTLRGBRenderer *rendererRGB;
-@property(nonatomic) MTKView *metalView;
-@property(atomic) RTCVideoFrame *videoFrame;
-@property(nonatomic) CGSize videoFrameSize;
-@property(nonatomic) int64_t lastFrameTimeNs;
+@property(nonatomic, strong) RTCMTLI420Renderer *rendererI420;
+@property(nonatomic, strong) RTCMTLNV12Renderer *rendererNV12;
+@property(nonatomic, strong) RTCMTLRGBRenderer *rendererRGB;
+@property(nonatomic, strong) MTKView *metalView;
+@property(atomic, strong) RTCVideoFrame *videoFrame;
 @end
 
-@implementation RTCMTLVideoView
+@implementation RTCMTLVideoView {
+  int64_t _lastFrameTimeNs;
+  CGSize _videoFrameSize;
+}
 
 @synthesize delegate = _delegate;
 @synthesize rendererI420 = _rendererI420;
@@ -46,11 +47,6 @@
 @synthesize rendererRGB = _rendererRGB;
 @synthesize metalView = _metalView;
 @synthesize videoFrame = _videoFrame;
-@synthesize useDisplayLink = _useDisplayLink;
-@synthesize videoFrameSize = _videoFrameSize;
-@synthesize lastFrameTimeNs = _lastFrameTimeNs;
-@synthesize enabled = _enabled;
-@synthesize rotationOverride = _rotationOverride;
 
 - (instancetype)initWithFrame:(CGRect)frameRect {
   self = [super initWithFrame:frameRect];
@@ -68,36 +64,8 @@
   return self;
 }
 
-- (void)setUseDisplayLink:(BOOL)useDisplayLink {
-  _useDisplayLink = useDisplayLink;
-  [self updateRunningState];
-}
-
-- (void)setEnabled:(BOOL)enabled {
-  _enabled = enabled;
-  [self updateRunningState];
-}
-
-- (UIViewContentMode)videoContentMode {
-  return self.metalView.contentMode;
-}
-
-- (void)setVideoContentMode:(UIViewContentMode)mode {
-  self.metalView.contentMode = mode;
-}
-
 #pragma mark - Private
 
-- (void)updateRunningState {
-  if (self.useDisplayLink) {
-    self.metalView.paused = !self.enabled;
-    self.metalView.enableSetNeedsDisplay = YES;
-  } else {
-    self.metalView.paused = YES;
-    self.metalView.enableSetNeedsDisplay = NO;
-  }
-}
-
 + (BOOL)isMetalAvailable {
 #if defined(RTC_SUPPORTS_METAL)
   return MTLCreateSystemDefaultDevice() != nil;
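For context on the removed useDisplayLink handling above: with the display link disabled, the reverted change put the MTKView into explicit-draw mode and triggered rendering itself from -renderFrame:. A minimal MetalKit sketch of that mode (standard MTKView API only):

    MTKView *metalView = [[MTKView alloc] initWithFrame:CGRectZero];
    metalView.paused = YES;                // stop the built-in display link
    metalView.enableSetNeedsDisplay = NO;  // do not redraw via setNeedsDisplay either
    // ... later, once a new frame has been stored:
    [metalView draw];                      // render exactly one frame on demand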
@@ -106,6 +74,11 @@
 #endif
 }
 
++ (MTKView *)createMetalView:(CGRect)frame {
+  MTKView *view = [[MTKViewClass alloc] initWithFrame:frame];
+  return view;
+}
+
 + (RTCMTLNV12Renderer *)createNV12Renderer {
   return [[RTCMTLNV12RendererClass alloc] init];
 }
@@ -121,28 +94,33 @@
 - (void)configure {
   NSAssert([RTCMTLVideoView isMetalAvailable], @"Metal not availiable on this device");
 
-  _enabled = YES;
-  _useDisplayLink = YES;
-  [self updateRunningState];
-
-  self.metalView = [[MTKViewClass alloc] initWithFrame:self.bounds];
-  self.metalView.delegate = self;
-  self.metalView.paused = YES;
-  self.metalView.enableSetNeedsDisplay = NO;
-  self.metalView.contentMode = UIViewContentModeScaleAspectFill;
-  [self addSubview:self.metalView];
-  self.videoFrameSize = CGSizeZero;
+  _metalView = [RTCMTLVideoView createMetalView:self.bounds];
+  [self configureMetalView];
 }
 
+- (void)configureMetalView {
+  if (_metalView) {
+    _metalView.delegate = self;
+    [self addSubview:_metalView];
+    _metalView.contentMode = UIViewContentModeScaleAspectFit;
+    _videoFrameSize = CGSizeZero;
+  }
+}
+
+- (void)setVideoContentMode:(UIViewContentMode)mode {
+  _metalView.contentMode = mode;
+}
+
+#pragma mark - Private
+
 - (void)layoutSubviews {
   [super layoutSubviews];
 
   CGRect bounds = self.bounds;
-  self.metalView.frame = bounds;
-  if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) {
-    self.metalView.drawableSize = [self drawableSize];
+  _metalView.frame = bounds;
+  if (!CGSizeEqualToSize(_videoFrameSize, CGSizeZero)) {
+    _metalView.drawableSize = _videoFrameSize;
   } else {
-    self.metalView.drawableSize = bounds.size;
+    _metalView.drawableSize = bounds.size;
   }
 }
 
@@ -152,11 +130,10 @@
   NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
   RTCVideoFrame *videoFrame = self.videoFrame;
   // Skip rendering if we've already rendered this frame.
-  if (!videoFrame || videoFrame.timeStampNs == self.lastFrameTimeNs) {
+  if (!videoFrame || videoFrame.timeStampNs == _lastFrameTimeNs) {
     return;
   }
 
-  RTCMTLRenderer *renderer;
   if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
     RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
     const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
@@ -169,7 +146,7 @@
           return;
         }
       }
-      renderer = self.rendererRGB;
+      [self.rendererRGB drawFrame:videoFrame];
     } else {
       if (!self.rendererNV12) {
         self.rendererNV12 = [RTCMTLVideoView createNV12Renderer];
@@ -179,7 +156,7 @@
           return;
         }
       }
-      renderer = self.rendererNV12;
+      [self.rendererNV12 drawFrame:videoFrame];
     }
   } else {
     if (!self.rendererI420) {
@@ -190,82 +167,30 @@
         return;
       }
     }
-    renderer = self.rendererI420;
+    [self.rendererI420 drawFrame:videoFrame];
   }
-  renderer.rotationOverride = self.rotationOverride;
-
-  [renderer drawFrame:videoFrame];
-  self.lastFrameTimeNs = videoFrame.timeStampNs;
+  _lastFrameTimeNs = videoFrame.timeStampNs;
 }
 
 - (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
 }
 
-- (RTCVideoRotation)frameRotation {
-  if (self.rotationOverride) {
-    RTCVideoRotation rotation;
-#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
-    if (@available(iOS 11, *)) {
-      [self.rotationOverride getValue:&rotation size:sizeof(rotation)];
-    } else
-#endif
-    {
-      [self.rotationOverride getValue:&rotation];
-    }
-    return rotation;
-  }
-
-  return self.videoFrame.rotation;
-}
-
-- (CGSize)drawableSize {
-  // Flip width/height if the rotations are not the same.
-  CGSize videoFrameSize = self.videoFrameSize;
-
-  BOOL useLandscape = ([self frameRotation] == RTCVideoRotation_0) ||
-      ([self frameRotation] == RTCVideoRotation_180);
-  BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) ||
-      (self.videoFrame.rotation == RTCVideoRotation_180);
-
-  if (useLandscape == sizeIsLandscape) {
-    return videoFrameSize;
-  } else {
-    return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
-  }
-}
-
 #pragma mark - RTCVideoRenderer
 
 - (void)setSize:(CGSize)size {
-  __weak RTCMTLVideoView *weakSelf = self;
+  self.metalView.drawableSize = size;
   dispatch_async(dispatch_get_main_queue(), ^{
-    RTCMTLVideoView *strongSelf = weakSelf;
-    strongSelf.videoFrameSize = size;
-    CGSize drawableSize = [strongSelf drawableSize];
-
-    strongSelf.metalView.drawableSize = drawableSize;
-    [strongSelf setNeedsLayout];
-    [strongSelf.delegate videoView:self didChangeVideoSize:size];
+    _videoFrameSize = size;
+    [self.delegate videoView:self didChangeVideoSize:size];
   });
 }
 
 - (void)renderFrame:(nullable RTCVideoFrame *)frame {
-  if (!self.isEnabled) {
-    return;
-  }
-
   if (frame == nil) {
     RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
     return;
   }
 
   self.videoFrame = frame;
-
-  if (!self.useDisplayLink) {
-    [self.metalView draw];
-  }
 }
 
 @end
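The deleted drawableSize helper above swapped width and height whenever the rotation used for rendering and the rotation carried by the frame disagreed on orientation. A worked example with hypothetical numbers:

    // videoFrameSize is 640x480 and the frame reports RTCVideoRotation_0 (landscape),
    // but a rotationOverride of RTCVideoRotation_90 forces portrait rendering,
    // so the helper would have returned the swapped size:
    CGSize videoFrameSize = CGSizeMake(640, 480);
    CGSize drawableSize = CGSizeMake(videoFrameSize.height, videoFrameSize.width);  // 480x640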
@@ -10,7 +10,6 @@
 
 #import <Foundation/Foundation.h>
 
-#import "WebRTC/RTCVideoFrame.h"
 #import "WebRTC/RTCVideoRenderer.h"
 
 // Check if metal is supported in WebRTC.
@@ -36,21 +35,7 @@ RTC_EXPORT
 
 @property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
 
-@property(nonatomic) UIViewContentMode videoContentMode;
-
-/** @abstract Enables/disables rendering.
- */
-@property(nonatomic, getter=isEnabled) BOOL enabled;
-
-/** @abstract If YES, the backing MTKView will use a display link to issue
-  draw calls.
-  @discussion Default is YES.
- */
-@property(nonatomic) BOOL useDisplayLink;
-
-/** @abstract Wrapped RTCVideoRotation, or nil.
- */
-@property(nullable) NSValue* rotationOverride;
+- (void)setVideoContentMode:(UIViewContentMode)mode;
 
 @end
 