macOS: Add Metal renderer and view.

BUG=webrtc:7079

Review-Url: https://codereview.webrtc.org/2778693003
Cr-Commit-Position: refs/heads/master@{#17482}
denicija
2017-03-31 02:47:29 -07:00
committed by Commit bot
parent 515dff40b7
commit 124a6fcddf
6 changed files with 554 additions and 20 deletions

View File

@@ -12,6 +12,7 @@
#import <AVFoundation/AVFoundation.h>
#import "WebRTC/RTCMTLNSVideoView.h"
#import "WebRTC/RTCNSGLVideoView.h"
#import "WebRTC/RTCVideoTrack.h"
@@ -35,8 +36,8 @@ static NSUInteger const kBottomViewHeight = 200;
@interface APPRTCMainView : NSView
@property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
@property(nonatomic, readonly) RTCNSGLVideoView* localVideoView;
@property(nonatomic, readonly) RTCNSGLVideoView* remoteVideoView;
@property(nonatomic, readonly) NSView<RTCVideoRenderer>* localVideoView;
@property(nonatomic, readonly) NSView<RTCVideoRenderer>* remoteVideoView;
- (void)displayLogMessage:(NSString*)message;
@@ -169,10 +170,10 @@ static NSUInteger const kBottomViewHeight = 200;
roomString = [NSUUID UUID].UUIDString;
roomString = [roomString stringByReplacingOccurrencesOfString:@"-" withString:@""];
}
[self.delegate appRTCMainView:self
didEnterRoomId:roomString
loopback:_loopbackButton.intValue];
[self setNeedsUpdateConstraints:YES];
}
#pragma mark - RTCNSGLVideoViewDelegate
@@ -214,6 +215,16 @@ static NSUInteger const kBottomViewHeight = 200;
[_scrollView setDocumentView:_logView];
[self addSubview:_scrollView];
// NOTE(daniela): Ignoring the Clang partial-availability diagnostic here.
// We perform a runtime check to make sure the class is available at runtime;
// if it is not, we fall back to a sensible default (the OpenGL view).
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpartial-availability"
if ([RTCMTLNSVideoView class]) {
_remoteVideoView = [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect];
_localVideoView = [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect];
} else {
NSOpenGLPixelFormatAttribute attributes[] = {
NSOpenGLPFADoubleBuffer,
NSOpenGLPFADepthSize, 24,
@@ -223,16 +234,22 @@ static NSUInteger const kBottomViewHeight = 200;
};
NSOpenGLPixelFormat* pixelFormat =
[[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
_remoteVideoView = [[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect
pixelFormat:pixelFormat];
[_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
_remoteVideoView.delegate = self;
[self addSubview:_remoteVideoView];
_localVideoView = [[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect
pixelFormat:pixelFormat];
RTCNSGLVideoView* remote =
[[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
remote.delegate = self;
_remoteVideoView = remote;
RTCNSGLVideoView* local =
[[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
local.delegate = self;
_localVideoView = local;
}
#pragma clang diagnostic pop
[_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
[self addSubview:_remoteVideoView];
[_localVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
_localVideoView.delegate = self;
[self addSubview:_localVideoView];
}

View File

@@ -214,12 +214,19 @@ if (is_ios || is_mac) {
if (is_mac) {
sources += [
"objc/Framework/Classes/Metal/RTCMTLI420Renderer.h",
"objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm",
"objc/Framework/Classes/Metal/RTCMTLNSVideoView.m",
"objc/Framework/Classes/RTCNSGLVideoView.m",
"objc/Framework/Headers/WebRTC/RTCMTLNSVideoView.h",
"objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h",
]
libs = [
"CoreVideo.framework",
"CoreMedia.framework",
"OpenGL.framework",
"Metal.framework",
"MetalKit.framework",
]
}

View File

@@ -0,0 +1,42 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <MetalKit/MTKView.h>
#import "WebRTC/RTCVideoFrame.h"
/**
* Protocol defining the ability to render an RTCVideoFrame in Metal-enabled views.
*/
@protocol RTCMTLRenderer <NSObject>
/**
* Method to be implemented to perform actual rendering of the provided frame.
*
* @param frame The frame to be rendered.
*/
- (void)drawFrame:(RTCVideoFrame *)frame;
@end
NS_AVAILABLE_MAC(10.11)
/**
* Implementation of the RTCMTLRenderer protocol for rendering I420 video frames.
*/
@interface RTCMTLI420Renderer : NSObject <RTCMTLRenderer>
/**
* Sets the provided view as the rendering destination, if possible.
*
* If that is not possible, the method returns NO and callers are responsible for
* performing any cleanup.
*/
- (BOOL)addRenderingDestination:(__kindof MTKView *)view;
@end
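
For orientation only, a minimal sketch of how a caller might drive this class follows; it is not part of the change. It assumes macOS 10.11+, an already configured MTKView, and an I420 RTCVideoFrame, and it mirrors the pattern RTCMTLNSVideoView uses further down in this CL. The helper name CreateI420Renderer is illustrative.

// Sketch only (not part of this CL). Assumes an existing MTKView and I420 frames.
#import <MetalKit/MetalKit.h>
#import "RTCMTLI420Renderer.h"

static RTCMTLI420Renderer *CreateI420Renderer(MTKView *metalView) {
  RTCMTLI420Renderer *renderer = [[RTCMTLI420Renderer alloc] init];
  if (![renderer addRenderingDestination:metalView]) {
    // Setup failed (e.g. no Metal device); the caller should fall back to the GL path.
    return nil;
  }
  return renderer;
}

// Later, typically from the MTKView delegate's -drawInMTKView::
//   [renderer drawFrame:videoFrame];  // videoFrame: an I420 RTCVideoFrame.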

View File

@@ -0,0 +1,336 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMTLI420Renderer.h"
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrame.h"
#include "webrtc/api/video/video_rotation.h"
#define MTL_STRINGIFY(s) @ #s
// As defined in shaderSource.
static NSString *const vertexFunctionName = @"vertexPassthrough";
static NSString *const fragmentFunctionName = @"fragmentColorConversion";
static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
static NSString *const commandBufferLabel = @"RTCCommandBuffer";
static NSString *const renderEncoderLabel = @"RTCEncoder";
static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
static const float cubeVertexData[64] = {
-1.0, -1.0, 0.0, 1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0,
// rotation = 90, offset = 16.
-1.0, -1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0,
// rotation = 180, offset = 32.
-1.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, 0.0, -1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0,
// rotation = 270, offset = 48.
-1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 1.0, -1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0,
};
static inline int offsetForRotation(webrtc::VideoRotation rotation) {
switch (rotation) {
case webrtc::kVideoRotation_0:
return 0;
case webrtc::kVideoRotation_90:
return 16;
case webrtc::kVideoRotation_180:
return 32;
case webrtc::kVideoRotation_270:
return 48;
}
return 0;
}
static NSString *const shaderSource = MTL_STRINGIFY(
using namespace metal; typedef struct {
packed_float2 position;
packed_float2 texcoord;
} Vertex;
typedef struct {
float4 position[[position]];
float2 texcoord;
} Varyings;
vertex Varyings vertexPassthrough(device Vertex * vertices[[buffer(0)]],
unsigned int vid[[vertex_id]]) {
Varyings out;
device Vertex &v = vertices[vid];
out.position = float4(float2(v.position), 0.0, 1.0);
out.texcoord = v.texcoord;
return out;
}
fragment half4 fragmentColorConversion(
Varyings in[[stage_in]], texture2d<float, access::sample> textureY[[texture(0)]],
texture2d<float, access::sample> textureU[[texture(1)]],
texture2d<float, access::sample> textureV[[texture(2)]]) {
constexpr sampler s(address::clamp_to_edge, filter::linear);
float y;
float u;
float v;
float r;
float g;
float b;
// Conversion from YUV to RGB, from http://www.fourcc.org/fccyvrgb.php
y = textureY.sample(s, in.texcoord).r;
u = textureU.sample(s, in.texcoord).r;
v = textureV.sample(s, in.texcoord).r;
u = u - 0.5;
v = v - 0.5;
r = y + 1.403 * v;
g = y - 0.344 * u - 0.714 * v;
b = y + 1.770 * u;
float4 out = float4(r, g, b, 1.0);
return half4(out);
});
// The maximum number of command buffers in flight.
// For now this is set to 1.
// In the future we might use triple buffering if it improves performance.
static const NSInteger kMaxInflightBuffers = 1;
@implementation RTCMTLI420Renderer {
__kindof MTKView *_view;
// Controller.
dispatch_semaphore_t _inflight_semaphore;
// Renderer.
id<MTLDevice> _device;
id<MTLCommandQueue> _commandQueue;
id<MTLLibrary> _defaultLibrary;
id<MTLRenderPipelineState> _pipelineState;
// Textures.
id<MTLTexture> _yTexture;
id<MTLTexture> _uTexture;
id<MTLTexture> _vTexture;
MTLTextureDescriptor *_descriptor;
MTLTextureDescriptor *_chromaDescriptor;
int _width;
int _height;
int _chromaWidth;
int _chromaHeight;
// Buffers.
id<MTLBuffer> _vertexBuffer;
// RTC Frame parameters.
int _offset;
}
- (instancetype)init {
if (self = [super init]) {
// Offset of 0 is equal to rotation of 0.
_offset = 0;
_inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
}
return self;
}
- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
return [self setupWithView:view];
}
#pragma mark - Private
- (BOOL)setupWithView:(__kindof MTKView *)view {
BOOL success = NO;
if ([self setupMetal]) {
[self setupView:view];
[self loadAssets];
[self setupBuffers];
success = YES;
}
return success;
}
#pragma mark - GPU methods
- (BOOL)setupMetal {
// Create the default Metal device.
_device = MTLCreateSystemDefaultDevice();
if (!_device) {
return NO;
}
// Create a new command queue.
_commandQueue = [_device newCommandQueue];
// Load metal library from source.
NSError *libraryError = nil;
id<MTLLibrary> sourceLibrary =
[_device newLibraryWithSource:shaderSource options:NULL error:&libraryError];
if (libraryError) {
RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
return NO;
}
if (!sourceLibrary) {
RTCLogError(@"Metal: Failed to load library. %@", libraryError);
return NO;
}
_defaultLibrary = sourceLibrary;
return YES;
}
- (void)setupView:(__kindof MTKView *)view {
view.device = _device;
view.preferredFramesPerSecond = 30;
view.autoResizeDrawable = NO;
// Keep a reference to the view; it is needed later in the rendering pipeline.
_view = view;
}
- (void)loadAssets {
id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
pipelineDescriptor.label = pipelineDescriptorLabel;
pipelineDescriptor.vertexFunction = vertexFunction;
pipelineDescriptor.fragmentFunction = fragmentFunction;
pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
NSError *error = nil;
_pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];
if (!_pipelineState) {
RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
}
}
- (void)setupBuffers {
_vertexBuffer = [_device newBufferWithBytes:cubeVertexData
length:sizeof(cubeVertexData)
options:MTLStorageModeShared];
}
- (void)render {
dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);
id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
commandBuffer.label = commandBufferLabel;
__block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
[commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
dispatch_semaphore_signal(block_semaphore);
}];
MTLRenderPassDescriptor *_renderPassDescriptor = _view.currentRenderPassDescriptor;
if (_renderPassDescriptor) { // Valid drawable.
id<MTLRenderCommandEncoder> renderEncoder =
[commandBuffer renderCommandEncoderWithDescriptor:_renderPassDescriptor];
renderEncoder.label = renderEncoderLabel;
// Set context state.
[renderEncoder pushDebugGroup:renderEncoderDebugGroup];
[renderEncoder setRenderPipelineState:_pipelineState];
[renderEncoder setVertexBuffer:_vertexBuffer offset:_offset * sizeof(float) atIndex:0];
[renderEncoder setFragmentTexture:_yTexture atIndex:0];
[renderEncoder setFragmentTexture:_uTexture atIndex:1];
[renderEncoder setFragmentTexture:_vTexture atIndex:2];
[renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
vertexStart:0
vertexCount:4
instanceCount:1];
[renderEncoder popDebugGroup];
[renderEncoder endEncoding];
[commandBuffer presentDrawable:_view.currentDrawable];
}
[commandBuffer commit];
}
#pragma mark - RTCMTLRenderer
- (void)drawFrame:(RTCVideoFrame *)frame {
if (!frame) {
return;
}
if ([self setupTexturesForFrame:frame]) {
@autoreleasepool {
[self render];
}
}
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
// Luma (y) texture.
if (!_descriptor || (_width != frame.width || _height != frame.height)) {
_width = frame.width;
_height = frame.height;
_descriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:_width
height:_height
mipmapped:NO];
_descriptor.usage = MTLTextureUsageShaderRead;
_yTexture = [_device newTextureWithDescriptor:_descriptor];
}
[_yTexture replaceRegion:MTLRegionMake2D(0, 0, _width, _height)
mipmapLevel:0
withBytes:frame.dataY
bytesPerRow:frame.strideY];
// Chroma (u,v) textures.
if (!_chromaDescriptor ||
(_chromaWidth != frame.width / 2 || _chromaHeight != frame.height / 2)) {
_chromaWidth = frame.width / 2;
_chromaHeight = frame.height / 2;
_chromaDescriptor =
[MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:_chromaWidth
height:_chromaHeight
mipmapped:NO];
_chromaDescriptor.usage = MTLTextureUsageShaderRead;
_uTexture = [_device newTextureWithDescriptor:_chromaDescriptor];
_vTexture = [_device newTextureWithDescriptor:_chromaDescriptor];
}
[_uTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
mipmapLevel:0
withBytes:frame.dataU
bytesPerRow:frame.strideU];
[_vTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
mipmapLevel:0
withBytes:frame.dataV
bytesPerRow:frame.strideV];
_offset = offsetForRotation((webrtc::VideoRotation)frame.rotation);
return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil);
}
@end
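
As a quick check on the arithmetic above, the sketch below (plain C/Objective-C, illustrative frame size only, not part of this CL) spells out the plane sizes setupTexturesForFrame expects for an I420 frame and the byte offsets that render passes to setVertexBuffer:offset: for each rotation.

#include <stddef.h>
#include <stdio.h>

int main(void) {
  // I420: full-resolution luma plane, half-resolution U and V planes.
  const int width = 640, height = 480;  // example frame size
  const size_t lumaBytes = (size_t)width * height;                // 307200 (R8Unorm, 1 byte/px)
  const size_t chromaBytes = (size_t)(width / 2) * (height / 2);  // 76800 per U/V plane
  // cubeVertexData holds 4 rotation blocks of 4 vertices * 4 floats = 16 floats each,
  // so offsetForRotation() returns 0/16/32/48 and the encoder sees 0/64/128/192 bytes.
  const size_t rotation90ByteOffset = 16 * sizeof(float);         // 64
  printf("%zu %zu %zu\n", lumaBytes, chromaBytes, rotation90ByteOffset);
  return 0;
}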

View File

@@ -0,0 +1,115 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMTLNSVideoView.h"
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#import "WebRTC/RTCVideoFrame.h"
#import "RTCMTLI420Renderer.h"
@interface RTCMTLNSVideoView () <MTKViewDelegate>
@property(nonatomic) id<RTCMTLRenderer> renderer;
@property(nonatomic, strong) MTKView *metalView;
@property(atomic, strong) RTCVideoFrame *videoFrame;
@end
@implementation RTCMTLNSVideoView {
id<RTCMTLRenderer> _renderer;
}
@synthesize renderer = _renderer;
@synthesize metalView = _metalView;
@synthesize videoFrame = _videoFrame;
- (instancetype)initWithFrame:(CGRect)frameRect {
self = [super initWithFrame:frameRect];
if (self) {
[self configure];
}
return self;
}
- (instancetype)initWithCoder:(NSCoder *)aCoder {
self = [super initWithCoder:aCoder];
if (self) {
[self configure];
}
return self;
}
#pragma mark - Private
+ (BOOL)isMetalAvailable {
return YES;
}
- (void)configure {
if ([[self class] isMetalAvailable]) {
_metalView = [[MTKView alloc] initWithFrame:self.bounds];
[self addSubview:_metalView];
_metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit;
_metalView.translatesAutoresizingMaskIntoConstraints = NO;
_metalView.framebufferOnly = YES;
_metalView.delegate = self;
_renderer = [[RTCMTLI420Renderer alloc] init];
if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) {
_renderer = nil;
}
}
}
- (void)updateConstraints {
NSDictionary *views = NSDictionaryOfVariableBindings(_metalView);
NSArray *constraintsHorizontal =
[NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|"
options:0
metrics:nil
views:views];
[self addConstraints:constraintsHorizontal];
NSArray *constraintsVertical =
[NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|"
options:0
metrics:nil
views:views];
[self addConstraints:constraintsVertical];
[super updateConstraints];
}
#pragma mark - MTKViewDelegate methods
- (void)drawInMTKView:(nonnull MTKView *)view {
if (view == self.metalView) {
[_renderer drawFrame:self.videoFrame];
}
}
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
}
#pragma mark - RTCVideoRenderer
- (void)setSize:(CGSize)size {
_metalView.drawableSize = size;
[_metalView draw];
}
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
if (frame == nil) {
return;
}
self.videoFrame = [frame newI420VideoFrame];
}
@end

View File

@@ -0,0 +1,17 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Cocoa/Cocoa.h>
#import "WebRTC/RTCVideoRenderer.h"
NS_AVAILABLE_MAC(10.11)
@interface RTCMTLNSVideoView : NSView <RTCVideoRenderer>
@end
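
Finally, a minimal adoption sketch (not part of the CL), following the APPRTCMainView change at the top of this commit: pick the Metal-backed view when the class is available at runtime, otherwise fall back to RTCNSGLVideoView, and attach the result to a video track. CreateVideoView and remoteVideoTrack are illustrative names, and RTCVideoTrack's -addRenderer: is assumed from the existing WebRTC Objective-C API.

// Sketch only (not part of this CL). Mirrors the runtime-availability pattern
// used in APPRTCMainView above; CreateVideoView is an illustrative helper.
#import <Cocoa/Cocoa.h>
#import "WebRTC/RTCMTLNSVideoView.h"
#import "WebRTC/RTCNSGLVideoView.h"
#import "WebRTC/RTCVideoRenderer.h"

static NSView<RTCVideoRenderer> *CreateVideoView(void) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpartial-availability"
  if ([RTCMTLNSVideoView class]) {  // Metal path, macOS 10.11+.
    return [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect];
  }
#pragma clang diagnostic pop
  // OpenGL fallback.
  NSOpenGLPixelFormatAttribute attrs[] = {NSOpenGLPFADoubleBuffer, 0};
  NSOpenGLPixelFormat *pixelFormat =
      [[NSOpenGLPixelFormat alloc] initWithAttributes:attrs];
  return [[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
}

// Attach to a track, e.g. (assuming an RTCVideoTrack *remoteVideoTrack):
//   [remoteVideoTrack addRenderer:CreateVideoView()];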