iOS: Add support for rendering native CVPixelBuffers directly
This CL adds support in RTCEAGLVideoView for rendering CVPixelBuffers as OpenGL ES textures directly, compared to the current code that first converts the CVPixelBuffers to I420, and then reuploads them as textures. This is only supported on iOS with the use of a CVOpenGLESTextureCache. The I420 rendering and native rendering are separated in two different implementations of a simple shader interface: @protocol Shader - (BOOL)drawFrame:(RTCVideoFrame*)frame; @end GL resources are allocated when the shader is instantiated and released when the shader is destroyed. RTCEAGLVideoView will lazily instantiate the necessary shader when it receives the first frame of that kind. This is primarily done to avoid allocating GL resources for both I420 and native rendering. Some other changes are: - Print GL shader compilation errors. - Remove updateTextureSizesForFrame() function. The textures will resize automatically anyway when the texture data is uploaded with glTexImage2D(). patch from issue 2154243002 at patchset 140001 (http://crrev.com/2154243002#ps140001) Continuing magjed@'s work since he is OOO this week. BUG= Review-Url: https://codereview.webrtc.org/2202823004 Cr-Commit-Position: refs/heads/master@{#13668}
This commit is contained in:
@ -81,6 +81,7 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
|
|||||||
"objc/Framework/Classes/RTCDataChannel.mm",
|
"objc/Framework/Classes/RTCDataChannel.mm",
|
||||||
"objc/Framework/Classes/RTCDataChannelConfiguration+Private.h",
|
"objc/Framework/Classes/RTCDataChannelConfiguration+Private.h",
|
||||||
"objc/Framework/Classes/RTCDataChannelConfiguration.mm",
|
"objc/Framework/Classes/RTCDataChannelConfiguration.mm",
|
||||||
|
"objc/Framework/Classes/RTCI420Shader.mm",
|
||||||
"objc/Framework/Classes/RTCIceCandidate+Private.h",
|
"objc/Framework/Classes/RTCIceCandidate+Private.h",
|
||||||
"objc/Framework/Classes/RTCIceCandidate.mm",
|
"objc/Framework/Classes/RTCIceCandidate.mm",
|
||||||
"objc/Framework/Classes/RTCIceServer+Private.h",
|
"objc/Framework/Classes/RTCIceServer+Private.h",
|
||||||
@ -91,6 +92,7 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
|
|||||||
"objc/Framework/Classes/RTCMediaStream.mm",
|
"objc/Framework/Classes/RTCMediaStream.mm",
|
||||||
"objc/Framework/Classes/RTCMediaStreamTrack+Private.h",
|
"objc/Framework/Classes/RTCMediaStreamTrack+Private.h",
|
||||||
"objc/Framework/Classes/RTCMediaStreamTrack.mm",
|
"objc/Framework/Classes/RTCMediaStreamTrack.mm",
|
||||||
|
"objc/Framework/Classes/RTCOpenGLDefines.h",
|
||||||
"objc/Framework/Classes/RTCOpenGLVideoRenderer.h",
|
"objc/Framework/Classes/RTCOpenGLVideoRenderer.h",
|
||||||
"objc/Framework/Classes/RTCOpenGLVideoRenderer.mm",
|
"objc/Framework/Classes/RTCOpenGLVideoRenderer.mm",
|
||||||
"objc/Framework/Classes/RTCPeerConnection+DataChannel.mm",
|
"objc/Framework/Classes/RTCPeerConnection+DataChannel.mm",
|
||||||
@ -111,6 +113,9 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
|
|||||||
"objc/Framework/Classes/RTCRtpSender.mm",
|
"objc/Framework/Classes/RTCRtpSender.mm",
|
||||||
"objc/Framework/Classes/RTCSessionDescription+Private.h",
|
"objc/Framework/Classes/RTCSessionDescription+Private.h",
|
||||||
"objc/Framework/Classes/RTCSessionDescription.mm",
|
"objc/Framework/Classes/RTCSessionDescription.mm",
|
||||||
|
"objc/Framework/Classes/RTCShader+Private.h",
|
||||||
|
"objc/Framework/Classes/RTCShader.h",
|
||||||
|
"objc/Framework/Classes/RTCShader.mm",
|
||||||
"objc/Framework/Classes/RTCStatsReport+Private.h",
|
"objc/Framework/Classes/RTCStatsReport+Private.h",
|
||||||
"objc/Framework/Classes/RTCStatsReport.mm",
|
"objc/Framework/Classes/RTCStatsReport.mm",
|
||||||
"objc/Framework/Classes/RTCVideoFrame+Private.h",
|
"objc/Framework/Classes/RTCVideoFrame+Private.h",
|
||||||
@ -152,6 +157,7 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
|
|||||||
if (is_ios) {
|
if (is_ios) {
|
||||||
sources += [
|
sources += [
|
||||||
"objc/Framework/Classes/RTCEAGLVideoView.m",
|
"objc/Framework/Classes/RTCEAGLVideoView.m",
|
||||||
|
"objc/Framework/Classes/RTCNativeNV12Shader.mm",
|
||||||
"objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h",
|
"objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h",
|
||||||
]
|
]
|
||||||
libs = [
|
libs = [
|
||||||
|
|||||||
- (void)renderFrame:(RTCVideoFrame *)frame {
#if !TARGET_OS_IPHONE
  // Generate the i420 frame on video send thread instead of main thread.
  // TODO(tkchin): Remove this once RTCEAGLVideoView supports uploading
  // CVPixelBuffer textures on OSX.
  [frame convertBufferIfNeeded];
#endif
  self.videoFrame = frame;
}
|
|
||||||
|
|||||||
211
webrtc/sdk/objc/Framework/Classes/RTCI420Shader.mm
Normal file
211
webrtc/sdk/objc/Framework/Classes/RTCI420Shader.mm
Normal file
/*
 *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCShader.h"

#include <vector>

#import "RTCShader+Private.h"
#import "WebRTC/RTCVideoFrame.h"

// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
// of 3 textures are used here, one for each of the Y, U and V planes. Having
// two sets alleviates CPU blockage in the event that the GPU is asked to render
// to a texture that is already in use.
static const GLsizei kNumTextureSets = 2;
static const GLsizei kNumTexturesPerSet = 3;
static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;

// Fragment shader converts YUV values from input textures into a final RGB
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
static const char kI420FragmentShaderSource[] =
  SHADER_VERSION
  "precision highp float;"
  FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
  "uniform lowp sampler2D s_textureY;\n"
  "uniform lowp sampler2D s_textureU;\n"
  "uniform lowp sampler2D s_textureV;\n"
  FRAGMENT_SHADER_OUT
  "void main() {\n"
  "    float y, u, v, r, g, b;\n"
  "    y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
  "    u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
  "    v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
  "    u = u - 0.5;\n"
  "    v = v - 0.5;\n"
  "    r = y + 1.403 * v;\n"
  "    g = y - 0.344 * u - 0.714 * v;\n"
  "    b = y + 1.770 * u;\n"
  "    " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
  "  }\n";

// Renders I420 frames by uploading the Y, U and V planes as three separate
// single-channel textures and recombining them into RGB in the fragment
// shader above. GL resources are allocated on init and released on dealloc.
@implementation RTCI420Shader {
  // YES when GL_UNPACK_ROW_LENGTH is available (GLES3 or desktop GL), which
  // lets us upload padded planes without an intermediate copy.
  BOOL _hasUnpackRowLength;
  GLint _currentTextureSet;
  // Handles for OpenGL constructs.
  GLuint _textures[kNumTextures];
  GLuint _i420Program;
  GLuint _vertexArray;
  GLuint _vertexBuffer;
  GLint _ySampler;
  GLint _uSampler;
  GLint _vSampler;
  // Used to create a non-padded plane for GPU upload when we receive padded
  // frames.
  std::vector<uint8_t> _planeBuffer;
}

// Designated initializer. Returns nil if any GL resource (program, textures,
// vertex buffer/array) fails to set up.
- (instancetype)initWithContext:(GlContextType *)context {
  if (self = [super init]) {
#if TARGET_OS_IPHONE
    _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3);
#else
    _hasUnpackRowLength = YES;
#endif
    // Plane rows are byte-aligned; the default 4-byte alignment would skew
    // uploads for odd widths.
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    if (![self setupI420Program] || ![self setupTextures] ||
        !RTCSetupVerticesForProgram(_i420Program, &_vertexBuffer,
                                    &_vertexArray)) {
      self = nil;
    }
  }
  return self;
}

- (void)dealloc {
  glDeleteProgram(_i420Program);
  glDeleteTextures(kNumTextures, _textures);
  glDeleteBuffers(1, &_vertexBuffer);
  glDeleteVertexArrays(1, &_vertexArray);
}

// Compiles/links the I420 program and caches the sampler uniform locations.
- (BOOL)setupI420Program {
  _i420Program = RTCCreateProgramFromFragmentSource(kI420FragmentShaderSource);
  if (!_i420Program) {
    return NO;
  }
  _ySampler = glGetUniformLocation(_i420Program, "s_textureY");
  _uSampler = glGetUniformLocation(_i420Program, "s_textureU");
  _vSampler = glGetUniformLocation(_i420Program, "s_textureV");

  return (_ySampler >= 0 && _uSampler >= 0 && _vSampler >= 0);
}

// Generates all plane textures up front and applies common sampling params.
- (BOOL)setupTextures {
  glGenTextures(kNumTextures, _textures);
  // Set parameters for each of the textures we created.
  for (GLsizei i = 0; i < kNumTextures; i++) {
    glBindTexture(GL_TEXTURE_2D, _textures[i]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  }
  return YES;
}

// Uploads |frame|'s planes and issues the draw call. Returns NO if texture
// upload fails.
- (BOOL)drawFrame:(RTCVideoFrame *)frame {
  glUseProgram(_i420Program);
  if (![self updateTextureDataForFrame:frame]) {
    return NO;
  }
#if !TARGET_OS_IPHONE
  glBindVertexArray(_vertexArray);
#endif
  glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
  glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

  return YES;
}

// Uploads one plane to the texture at index |offset| and binds |sampler| to
// that texture unit. |stride| may exceed |width| for padded frames; in that
// case GL_UNPACK_ROW_LENGTH is used when available, otherwise an unpadded
// copy is staged in |_planeBuffer| (sized by updateTextureDataForFrame:).
- (void)uploadPlane:(const uint8_t *)plane
            sampler:(GLint)sampler
             offset:(GLint)offset
              width:(size_t)width
             height:(size_t)height
             stride:(int32_t)stride {
  glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + offset));
  glBindTexture(GL_TEXTURE_2D, _textures[offset]);

  // When setting texture sampler uniforms, the texture index is used not
  // the texture handle.
  glUniform1i(sampler, offset);
  const uint8_t *uploadPlane = plane;
  if ((size_t)stride != width) {
    if (_hasUnpackRowLength) {
      // GLES3 allows us to specify stride.
      glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
      glTexImage2D(GL_TEXTURE_2D,
                   0,
                   RTC_PIXEL_FORMAT,
                   static_cast<GLsizei>(width),
                   static_cast<GLsizei>(height),
                   0,
                   RTC_PIXEL_FORMAT,
                   GL_UNSIGNED_BYTE,
                   uploadPlane);
      glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
      return;
    } else {
      // Make an unpadded copy and upload that instead. Quick profiling showed
      // that this is faster than uploading row by row using glTexSubImage2D.
      uint8_t *unpaddedPlane = _planeBuffer.data();
      for (size_t y = 0; y < height; ++y) {
        memcpy(unpaddedPlane + y * width, plane + y * stride, width);
      }
      uploadPlane = unpaddedPlane;
    }
  }
  glTexImage2D(GL_TEXTURE_2D,
               0,
               RTC_PIXEL_FORMAT,
               static_cast<GLsizei>(width),
               static_cast<GLsizei>(height),
               0,
               RTC_PIXEL_FORMAT,
               GL_UNSIGNED_BYTE,
               uploadPlane);
}

// Uploads all three planes of |frame| into the current texture set, then
// advances to the next set so the GPU can keep reading the previous one.
- (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame {
  // Fix: use kNumTexturesPerSet instead of a magic '3' so the offset math
  // stays in sync with the constants above.
  GLint textureOffset = _currentTextureSet * kNumTexturesPerSet;
  NSAssert(textureOffset + kNumTexturesPerSet <= kNumTextures,
           @"invalid offset");

  // Grow the staging buffer only when at least one plane is padded. The luma
  // plane is the largest, so width * height covers chroma copies as well.
  if (frame.yPitch != static_cast<int32_t>(frame.width) ||
      frame.uPitch != static_cast<int32_t>(frame.chromaWidth) ||
      frame.vPitch != static_cast<int32_t>(frame.chromaWidth)) {
    _planeBuffer.resize(frame.width * frame.height);
  }

  [self uploadPlane:frame.yPlane
            sampler:_ySampler
             offset:textureOffset
              width:frame.width
             height:frame.height
             stride:frame.yPitch];

  [self uploadPlane:frame.uPlane
            sampler:_uSampler
             offset:textureOffset + 1
              width:frame.chromaWidth
             height:frame.chromaHeight
             stride:frame.uPitch];

  [self uploadPlane:frame.vPlane
            sampler:_vSampler
             offset:textureOffset + 2
              width:frame.chromaWidth
             height:frame.chromaHeight
             stride:frame.vPitch];

  _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
  return YES;
}

@end
161
webrtc/sdk/objc/Framework/Classes/RTCNativeNV12Shader.mm
Normal file
161
webrtc/sdk/objc/Framework/Classes/RTCNativeNV12Shader.mm
Normal file
/*
 *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCShader.h"

// Native CVPixelBufferRef rendering is only supported on iPhone because it
// depends on CVOpenGLESTextureCacheCreate.
#if TARGET_OS_IPHONE

#import <CoreVideo/CVOpenGLESTextureCache.h>

#import "RTCShader+Private.h"
#import "WebRTC/RTCVideoFrame.h"

#include "webrtc/base/checks.h"

// Fragment shader: samples the luma plane from one texture and the
// interleaved chroma plane from a GL_LUMINANCE_ALPHA texture (u in .r,
// v in .a), then converts YUV to RGB.
static const char kNV12FragmentShaderSource[] =
  SHADER_VERSION
  "precision mediump float;"
  FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
  "uniform lowp sampler2D s_textureY;\n"
  "uniform lowp sampler2D s_textureUV;\n"
  FRAGMENT_SHADER_OUT
  "void main() {\n"
  "    mediump float y;\n"
  "    mediump vec2 uv;\n"
  "    y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
  "    uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n"
  "        vec2(0.5, 0.5);\n"
  "    " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n"
  "                                     y - 0.344 * uv.x - 0.714 * uv.y,\n"
  "                                     y + 1.770 * uv.x,\n"
  "                                     1.0);\n"
  "  }\n";

// Renders NV12 CVPixelBuffers directly as GL textures via a
// CVOpenGLESTextureCache, avoiding the I420 conversion + re-upload path.
@implementation RTCNativeNV12Shader {
  GLuint _vertexBuffer;
  GLuint _nv12Program;
  GLint _ySampler;
  GLint _uvSampler;
  // Owned CF object; released in dealloc.
  CVOpenGLESTextureCacheRef _textureCache;
}

// Designated initializer. Returns nil if the program, texture cache or
// vertex buffer fails to set up.
- (instancetype)initWithContext:(GlContextType *)context {
  if (self = [super init]) {
    if (![self setupNV12Program] ||
        ![self setupTextureCacheWithContext:context] ||
        !RTCSetupVerticesForProgram(_nv12Program, &_vertexBuffer, nullptr)) {
      self = nil;
    }
  }
  return self;
}

- (void)dealloc {
  glDeleteProgram(_nv12Program);
  glDeleteBuffers(1, &_vertexBuffer);
  if (_textureCache) {
    CFRelease(_textureCache);
    _textureCache = nullptr;
  }
}

// Compiles/links the NV12 program and caches the sampler uniform locations.
- (BOOL)setupNV12Program {
  _nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource);
  if (!_nv12Program) {
    return NO;
  }
  _ySampler = glGetUniformLocation(_nv12Program, "s_textureY");
  _uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV");

  return (_ySampler >= 0 && _uvSampler >= 0);
}

// Creates the CVOpenGLESTextureCache bound to |context|.
- (BOOL)setupTextureCacheWithContext:(GlContextType *)context {
  CVReturn ret = CVOpenGLESTextureCacheCreate(
      kCFAllocatorDefault, NULL,
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
      context,
#else
      (__bridge void *)context,
#endif
      NULL, &_textureCache);
  return ret == kCVReturnSuccess;
}

// Wraps |frame|'s CVPixelBuffer planes as GL textures and draws them.
// Requires a biplanar (NV12) pixel buffer; CHECKs otherwise.
- (BOOL)drawFrame:(RTCVideoFrame *)frame {
  CVPixelBufferRef pixelBuffer = frame.nativeHandle;
  RTC_CHECK(pixelBuffer);
  glUseProgram(_nv12Program);
  const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
  RTC_CHECK(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
            pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
      << "Unsupported native pixel format: " << pixelFormat;

  // Y-plane.
  const int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
  const int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);

  CVOpenGLESTextureRef lumaTexture = nullptr;
  glActiveTexture(GL_TEXTURE0);
  glUniform1i(_ySampler, 0);
  CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(
      kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D,
      RTC_PIXEL_FORMAT, lumaWidth, lumaHeight, RTC_PIXEL_FORMAT,
      GL_UNSIGNED_BYTE, 0, &lumaTexture);
  if (ret != kCVReturnSuccess) {
    // Fix: the out-texture may be left NULL on failure and CFRelease(NULL)
    // crashes, so guard before releasing.
    if (lumaTexture) {
      CFRelease(lumaTexture);
    }
    return NO;
  }

  RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D),
               CVOpenGLESTextureGetTarget(lumaTexture));
  glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(lumaTexture));
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  // UV-plane.
  const int chromaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
  const int chromaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);

  CVOpenGLESTextureRef chromaTexture = nullptr;
  glActiveTexture(GL_TEXTURE1);
  glUniform1i(_uvSampler, 1);
  ret = CVOpenGLESTextureCacheCreateTextureFromImage(
      kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D,
      GL_LUMINANCE_ALPHA, chromaWidth, chromaHeight, GL_LUMINANCE_ALPHA,
      GL_UNSIGNED_BYTE, 1, &chromaTexture);
  if (ret != kCVReturnSuccess) {
    // Same NULL-guard as above; |lumaTexture| is known valid here.
    if (chromaTexture) {
      CFRelease(chromaTexture);
    }
    CFRelease(lumaTexture);
    return NO;
  }

  RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D),
               CVOpenGLESTextureGetTarget(chromaTexture));
  glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(chromaTexture));
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
  glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

  CFRelease(chromaTexture);
  CFRelease(lumaTexture);

  return YES;
}

@end
#endif  // TARGET_OS_IPHONE
35
webrtc/sdk/objc/Framework/Classes/RTCOpenGLDefines.h
Normal file
35
webrtc/sdk/objc/Framework/Classes/RTCOpenGLDefines.h
Normal file
/*
 *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_RTCOPENGLDEFINES_H_
#define WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_RTCOPENGLDEFINES_H_

// Fix: make sure TARGET_OS_IPHONE is actually defined. If this header is
// included before any Apple framework header, an undefined macro would
// silently evaluate to 0 in the #if below and select the OS X branch.
#import <TargetConditionals.h>

// Shared GLSL / pixel-format glue between the iOS (GLES) and OS X
// (desktop GL) shader implementations.
#if TARGET_OS_IPHONE
#define RTC_PIXEL_FORMAT GL_LUMINANCE
#define SHADER_VERSION
#define VERTEX_SHADER_IN "attribute"
#define VERTEX_SHADER_OUT "varying"
#define FRAGMENT_SHADER_IN "varying"
#define FRAGMENT_SHADER_OUT
#define FRAGMENT_SHADER_COLOR "gl_FragColor"
#define FRAGMENT_SHADER_TEXTURE "texture2D"

@class EAGLContext;
typedef EAGLContext GlContextType;
#else
#define RTC_PIXEL_FORMAT GL_RED
#define SHADER_VERSION "#version 150\n"
#define VERTEX_SHADER_IN "in"
#define VERTEX_SHADER_OUT "out"
#define FRAGMENT_SHADER_IN "in"
#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
#define FRAGMENT_SHADER_COLOR "fragColor"
#define FRAGMENT_SHADER_TEXTURE "texture"

@class NSOpenGLContext;
typedef NSOpenGLContext GlContextType;
#endif

#endif  // WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_RTCOPENGLDEFINES_H_
@ -10,159 +10,14 @@
|
|||||||
|
|
||||||
#import "RTCOpenGLVideoRenderer.h"
|
#import "RTCOpenGLVideoRenderer.h"
|
||||||
|
|
||||||
#if TARGET_OS_IPHONE
|
#import "RTCShader+Private.h"
|
||||||
#import <OpenGLES/ES3/gl.h>
|
|
||||||
#else
|
|
||||||
#import <OpenGL/gl3.h>
|
|
||||||
#endif
|
|
||||||
#include <string.h>
|
|
||||||
#include <memory>
|
|
||||||
|
|
||||||
#import "WebRTC/RTCVideoFrame.h"
|
#import "WebRTC/RTCVideoFrame.h"
|
||||||
|
|
||||||
|
|
||||||
// TODO(tkchin): check and log openGL errors. Methods here return BOOLs in
|
|
||||||
// anticipation of that happening in the future.
|
|
||||||
|
|
||||||
#if TARGET_OS_IPHONE
|
|
||||||
#define RTC_PIXEL_FORMAT GL_LUMINANCE
|
|
||||||
#define SHADER_VERSION
|
|
||||||
#define VERTEX_SHADER_IN "attribute"
|
|
||||||
#define VERTEX_SHADER_OUT "varying"
|
|
||||||
#define FRAGMENT_SHADER_IN "varying"
|
|
||||||
#define FRAGMENT_SHADER_OUT
|
|
||||||
#define FRAGMENT_SHADER_COLOR "gl_FragColor"
|
|
||||||
#define FRAGMENT_SHADER_TEXTURE "texture2D"
|
|
||||||
#else
|
|
||||||
#define RTC_PIXEL_FORMAT GL_RED
|
|
||||||
#define SHADER_VERSION "#version 150\n"
|
|
||||||
#define VERTEX_SHADER_IN "in"
|
|
||||||
#define VERTEX_SHADER_OUT "out"
|
|
||||||
#define FRAGMENT_SHADER_IN "in"
|
|
||||||
#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
|
|
||||||
#define FRAGMENT_SHADER_COLOR "fragColor"
|
|
||||||
#define FRAGMENT_SHADER_TEXTURE "texture"
|
|
||||||
#endif
|
|
||||||
|
|
||||||
// Vertex shader doesn't do anything except pass coordinates through.
|
|
||||||
static const char kVertexShaderSource[] =
|
|
||||||
SHADER_VERSION
|
|
||||||
VERTEX_SHADER_IN " vec2 position;\n"
|
|
||||||
VERTEX_SHADER_IN " vec2 texcoord;\n"
|
|
||||||
VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
|
|
||||||
"void main() {\n"
|
|
||||||
" gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
|
|
||||||
" v_texcoord = texcoord;\n"
|
|
||||||
"}\n";
|
|
||||||
|
|
||||||
// Fragment shader converts YUV values from input textures into a final RGB
|
|
||||||
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
|
|
||||||
static const char kFragmentShaderSource[] =
|
|
||||||
SHADER_VERSION
|
|
||||||
"precision highp float;"
|
|
||||||
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
|
|
||||||
"uniform lowp sampler2D s_textureY;\n"
|
|
||||||
"uniform lowp sampler2D s_textureU;\n"
|
|
||||||
"uniform lowp sampler2D s_textureV;\n"
|
|
||||||
FRAGMENT_SHADER_OUT
|
|
||||||
"void main() {\n"
|
|
||||||
" float y, u, v, r, g, b;\n"
|
|
||||||
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
|
|
||||||
" u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
|
|
||||||
" v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
|
|
||||||
" u = u - 0.5;\n"
|
|
||||||
" v = v - 0.5;\n"
|
|
||||||
" r = y + 1.403 * v;\n"
|
|
||||||
" g = y - 0.344 * u - 0.714 * v;\n"
|
|
||||||
" b = y + 1.770 * u;\n"
|
|
||||||
" " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
|
|
||||||
" }\n";
|
|
||||||
|
|
||||||
// Compiles a shader of the given |type| with GLSL source |source| and returns
|
|
||||||
// the shader handle or 0 on error.
|
|
||||||
GLuint CreateShader(GLenum type, const GLchar *source) {
|
|
||||||
GLuint shader = glCreateShader(type);
|
|
||||||
if (!shader) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
glShaderSource(shader, 1, &source, NULL);
|
|
||||||
glCompileShader(shader);
|
|
||||||
GLint compileStatus = GL_FALSE;
|
|
||||||
glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
|
|
||||||
if (compileStatus == GL_FALSE) {
|
|
||||||
glDeleteShader(shader);
|
|
||||||
shader = 0;
|
|
||||||
}
|
|
||||||
return shader;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Links a shader program with the given vertex and fragment shaders and
|
|
||||||
// returns the program handle or 0 on error.
|
|
||||||
GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) {
|
|
||||||
if (vertexShader == 0 || fragmentShader == 0) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
GLuint program = glCreateProgram();
|
|
||||||
if (!program) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
glAttachShader(program, vertexShader);
|
|
||||||
glAttachShader(program, fragmentShader);
|
|
||||||
glLinkProgram(program);
|
|
||||||
GLint linkStatus = GL_FALSE;
|
|
||||||
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
|
|
||||||
if (linkStatus == GL_FALSE) {
|
|
||||||
glDeleteProgram(program);
|
|
||||||
program = 0;
|
|
||||||
}
|
|
||||||
return program;
|
|
||||||
}
|
|
||||||
|
|
||||||
// When modelview and projection matrices are identity (default) the world is
|
|
||||||
// contained in the square around origin with unit size 2. Drawing to these
|
|
||||||
// coordinates is equivalent to drawing to the entire screen. The texture is
|
|
||||||
// stretched over that square using texture coordinates (u, v) that range
|
|
||||||
// from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically
|
|
||||||
// here because the incoming frame has origin in upper left hand corner but
|
|
||||||
// OpenGL expects origin in bottom left corner.
|
|
||||||
const GLfloat gVertices[] = {
|
|
||||||
// X, Y, U, V.
|
|
||||||
-1, -1, 0, 1, // Bottom left.
|
|
||||||
1, -1, 1, 1, // Bottom right.
|
|
||||||
1, 1, 1, 0, // Top right.
|
|
||||||
-1, 1, 0, 0, // Top left.
|
|
||||||
};
|
|
||||||
|
|
||||||
// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
|
|
||||||
// of 3 textures are used here, one for each of the Y, U and V planes. Having
|
|
||||||
// two sets alleviates CPU blockage in the event that the GPU is asked to render
|
|
||||||
// to a texture that is already in use.
|
|
||||||
static const GLsizei kNumTextureSets = 2;
|
|
||||||
static const GLsizei kNumTextures = 3 * kNumTextureSets;
|
|
||||||
|
|
||||||
@implementation RTCOpenGLVideoRenderer {
|
@implementation RTCOpenGLVideoRenderer {
|
||||||
#if TARGET_OS_IPHONE
|
GlContextType *_context;
|
||||||
EAGLContext *_context;
|
|
||||||
#else
|
|
||||||
NSOpenGLContext *_context;
|
|
||||||
#endif
|
|
||||||
BOOL _isInitialized;
|
BOOL _isInitialized;
|
||||||
GLint _currentTextureSet;
|
id<RTCShader> _i420Shader;
|
||||||
// Handles for OpenGL constructs.
|
id<RTCShader> _nv12Shader;
|
||||||
GLuint _textures[kNumTextures];
|
|
||||||
GLuint _program;
|
|
||||||
#if !TARGET_OS_IPHONE
|
|
||||||
GLuint _vertexArray;
|
|
||||||
#endif
|
|
||||||
GLuint _vertexBuffer;
|
|
||||||
GLint _position;
|
|
||||||
GLint _texcoord;
|
|
||||||
GLint _ySampler;
|
|
||||||
GLint _uSampler;
|
|
||||||
GLint _vSampler;
|
|
||||||
// Used to create a non-padded plane for GPU upload when we receive padded
|
|
||||||
// frames.
|
|
||||||
std::unique_ptr<uint8_t[]> _planeBuffer;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@synthesize lastDrawnFrame = _lastDrawnFrame;
|
@synthesize lastDrawnFrame = _lastDrawnFrame;
|
||||||
@ -172,11 +27,7 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
|
|||||||
glDisable(GL_DITHER);
|
glDisable(GL_DITHER);
|
||||||
}
|
}
|
||||||
|
|
||||||
#if TARGET_OS_IPHONE
|
- (instancetype)initWithContext:(GlContextType *)context {
|
||||||
- (instancetype)initWithContext:(EAGLContext *)context {
|
|
||||||
#else
|
|
||||||
- (instancetype)initWithContext:(NSOpenGLContext *)context {
|
|
||||||
#endif
|
|
||||||
NSAssert(context != nil, @"context cannot be nil");
|
NSAssert(context != nil, @"context cannot be nil");
|
||||||
if (self = [super init]) {
|
if (self = [super init]) {
|
||||||
_context = context;
|
_context = context;
|
||||||
@ -185,29 +36,37 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
|
|||||||
}
|
}
|
||||||
|
|
||||||
- (BOOL)drawFrame:(RTCVideoFrame *)frame {
|
- (BOOL)drawFrame:(RTCVideoFrame *)frame {
|
||||||
if (!_isInitialized) {
|
if (!_isInitialized || !frame || frame == _lastDrawnFrame) {
|
||||||
return NO;
|
|
||||||
}
|
|
||||||
if (_lastDrawnFrame == frame) {
|
|
||||||
return NO;
|
return NO;
|
||||||
}
|
}
|
||||||
[self ensureGLContext];
|
[self ensureGLContext];
|
||||||
glClear(GL_COLOR_BUFFER_BIT);
|
glClear(GL_COLOR_BUFFER_BIT);
|
||||||
if (frame) {
|
id<RTCShader> shader = nil;
|
||||||
if (![self updateTextureSizesForFrame:frame] ||
|
#if TARGET_OS_IPHONE
|
||||||
![self updateTextureDataForFrame:frame]) {
|
if (frame.nativeHandle) {
|
||||||
|
if (!_nv12Shader) {
|
||||||
|
_nv12Shader = [[RTCNativeNV12Shader alloc] initWithContext:_context];
|
||||||
|
}
|
||||||
|
shader = _nv12Shader;
|
||||||
|
#else
|
||||||
|
// Rendering native CVPixelBuffer is not supported on OS X.
|
||||||
|
if (false) {
|
||||||
|
#endif
|
||||||
|
} else {
|
||||||
|
if (!_i420Shader) {
|
||||||
|
_i420Shader = [[RTCI420Shader alloc] initWithContext:_context];
|
||||||
|
}
|
||||||
|
shader = _i420Shader;
|
||||||
|
}
|
||||||
|
if (!shader || ![shader drawFrame:frame]) {
|
||||||
return NO;
|
return NO;
|
||||||
}
|
}
|
||||||
#if !TARGET_OS_IPHONE
|
|
||||||
glBindVertexArray(_vertexArray);
|
|
||||||
#endif
|
|
||||||
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
|
|
||||||
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
|
|
||||||
}
|
|
||||||
#if !TARGET_OS_IPHONE
|
#if !TARGET_OS_IPHONE
|
||||||
[_context flushBuffer];
|
[_context flushBuffer];
|
||||||
#endif
|
#endif
|
||||||
_lastDrawnFrame = frame;
|
_lastDrawnFrame = frame;
|
||||||
|
|
||||||
return YES;
|
return YES;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -216,17 +75,6 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
[self ensureGLContext];
|
[self ensureGLContext];
|
||||||
if (![self setupProgram]) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (![self setupTextures]) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (![self setupVertices]) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
glUseProgram(_program);
|
|
||||||
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
|
|
||||||
_isInitialized = YES;
|
_isInitialized = YES;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -235,14 +83,8 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
[self ensureGLContext];
|
[self ensureGLContext];
|
||||||
glDeleteProgram(_program);
|
_i420Shader = nil;
|
||||||
_program = 0;
|
_nv12Shader = nil;
|
||||||
glDeleteTextures(kNumTextures, _textures);
|
|
||||||
glDeleteBuffers(1, &_vertexBuffer);
|
|
||||||
_vertexBuffer = 0;
|
|
||||||
#if !TARGET_OS_IPHONE
|
|
||||||
glDeleteVertexArrays(1, &_vertexArray);
|
|
||||||
#endif
|
|
||||||
_isInitialized = NO;
|
_isInitialized = NO;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -261,224 +103,4 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
|
|||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
- (BOOL)setupProgram {
|
|
||||||
NSAssert(!_program, @"program already set up");
|
|
||||||
GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource);
|
|
||||||
NSAssert(vertexShader, @"failed to create vertex shader");
|
|
||||||
GLuint fragmentShader =
|
|
||||||
CreateShader(GL_FRAGMENT_SHADER, kFragmentShaderSource);
|
|
||||||
NSAssert(fragmentShader, @"failed to create fragment shader");
|
|
||||||
_program = CreateProgram(vertexShader, fragmentShader);
|
|
||||||
// Shaders are created only to generate program.
|
|
||||||
if (vertexShader) {
|
|
||||||
glDeleteShader(vertexShader);
|
|
||||||
}
|
|
||||||
if (fragmentShader) {
|
|
||||||
glDeleteShader(fragmentShader);
|
|
||||||
}
|
|
||||||
if (!_program) {
|
|
||||||
return NO;
|
|
||||||
}
|
|
||||||
_position = glGetAttribLocation(_program, "position");
|
|
||||||
_texcoord = glGetAttribLocation(_program, "texcoord");
|
|
||||||
_ySampler = glGetUniformLocation(_program, "s_textureY");
|
|
||||||
_uSampler = glGetUniformLocation(_program, "s_textureU");
|
|
||||||
_vSampler = glGetUniformLocation(_program, "s_textureV");
|
|
||||||
if (_position < 0 || _texcoord < 0 || _ySampler < 0 || _uSampler < 0 ||
|
|
||||||
_vSampler < 0) {
|
|
||||||
return NO;
|
|
||||||
}
|
|
||||||
return YES;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (BOOL)setupTextures {
|
|
||||||
glGenTextures(kNumTextures, _textures);
|
|
||||||
// Set parameters for each of the textures we created.
|
|
||||||
for (GLsizei i = 0; i < kNumTextures; i++) {
|
|
||||||
glActiveTexture(GL_TEXTURE0 + i);
|
|
||||||
glBindTexture(GL_TEXTURE_2D, _textures[i]);
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
|
||||||
}
|
|
||||||
return YES;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (BOOL)updateTextureSizesForFrame:(RTCVideoFrame *)frame {
|
|
||||||
if (frame.height == _lastDrawnFrame.height &&
|
|
||||||
frame.width == _lastDrawnFrame.width &&
|
|
||||||
frame.chromaWidth == _lastDrawnFrame.chromaWidth &&
|
|
||||||
frame.chromaHeight == _lastDrawnFrame.chromaHeight) {
|
|
||||||
return YES;
|
|
||||||
}
|
|
||||||
GLsizei lumaWidth = static_cast<GLsizei>(frame.width);
|
|
||||||
GLsizei lumaHeight = static_cast<GLsizei>(frame.height);
|
|
||||||
GLsizei chromaWidth = static_cast<GLsizei>(frame.chromaWidth);
|
|
||||||
GLsizei chromaHeight = static_cast<GLsizei>(frame.chromaHeight);
|
|
||||||
for (GLint i = 0; i < kNumTextureSets; i++) {
|
|
||||||
glActiveTexture(GL_TEXTURE0 + i * 3);
|
|
||||||
glTexImage2D(GL_TEXTURE_2D,
|
|
||||||
0,
|
|
||||||
RTC_PIXEL_FORMAT,
|
|
||||||
lumaWidth,
|
|
||||||
lumaHeight,
|
|
||||||
0,
|
|
||||||
RTC_PIXEL_FORMAT,
|
|
||||||
GL_UNSIGNED_BYTE,
|
|
||||||
0);
|
|
||||||
glActiveTexture(GL_TEXTURE0 + i * 3 + 1);
|
|
||||||
glTexImage2D(GL_TEXTURE_2D,
|
|
||||||
0,
|
|
||||||
RTC_PIXEL_FORMAT,
|
|
||||||
chromaWidth,
|
|
||||||
chromaHeight,
|
|
||||||
0,
|
|
||||||
RTC_PIXEL_FORMAT,
|
|
||||||
GL_UNSIGNED_BYTE,
|
|
||||||
0);
|
|
||||||
glActiveTexture(GL_TEXTURE0 + i * 3 + 2);
|
|
||||||
glTexImage2D(GL_TEXTURE_2D,
|
|
||||||
0,
|
|
||||||
RTC_PIXEL_FORMAT,
|
|
||||||
chromaWidth,
|
|
||||||
chromaHeight,
|
|
||||||
0,
|
|
||||||
RTC_PIXEL_FORMAT,
|
|
||||||
GL_UNSIGNED_BYTE,
|
|
||||||
0);
|
|
||||||
}
|
|
||||||
if ((NSUInteger)frame.yPitch != frame.width ||
|
|
||||||
(NSUInteger)frame.uPitch != frame.chromaWidth ||
|
|
||||||
(NSUInteger)frame.vPitch != frame.chromaWidth) {
|
|
||||||
_planeBuffer.reset(new uint8_t[frame.width * frame.height]);
|
|
||||||
} else {
|
|
||||||
_planeBuffer.reset();
|
|
||||||
}
|
|
||||||
return YES;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)uploadPlane:(const uint8_t *)plane
|
|
||||||
sampler:(GLint)sampler
|
|
||||||
offset:(GLint)offset
|
|
||||||
width:(size_t)width
|
|
||||||
height:(size_t)height
|
|
||||||
stride:(int32_t)stride {
|
|
||||||
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + offset));
|
|
||||||
// When setting texture sampler uniforms, the texture index is used not
|
|
||||||
// the texture handle.
|
|
||||||
glUniform1i(sampler, offset);
|
|
||||||
#if TARGET_OS_IPHONE
|
|
||||||
BOOL hasUnpackRowLength = _context.API == kEAGLRenderingAPIOpenGLES3;
|
|
||||||
#else
|
|
||||||
BOOL hasUnpackRowLength = YES;
|
|
||||||
#endif
|
|
||||||
const uint8_t *uploadPlane = plane;
|
|
||||||
if ((size_t)stride != width) {
|
|
||||||
if (hasUnpackRowLength) {
|
|
||||||
// GLES3 allows us to specify stride.
|
|
||||||
glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
|
|
||||||
glTexImage2D(GL_TEXTURE_2D,
|
|
||||||
0,
|
|
||||||
RTC_PIXEL_FORMAT,
|
|
||||||
static_cast<GLsizei>(width),
|
|
||||||
static_cast<GLsizei>(height),
|
|
||||||
0,
|
|
||||||
RTC_PIXEL_FORMAT,
|
|
||||||
GL_UNSIGNED_BYTE,
|
|
||||||
uploadPlane);
|
|
||||||
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
|
|
||||||
return;
|
|
||||||
} else {
|
|
||||||
// Make an unpadded copy and upload that instead. Quick profiling showed
|
|
||||||
// that this is faster than uploading row by row using glTexSubImage2D.
|
|
||||||
uint8_t *unpaddedPlane = _planeBuffer.get();
|
|
||||||
for (size_t y = 0; y < height; ++y) {
|
|
||||||
memcpy(unpaddedPlane + y * width, plane + y * stride, width);
|
|
||||||
}
|
|
||||||
uploadPlane = unpaddedPlane;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
glTexImage2D(GL_TEXTURE_2D,
|
|
||||||
0,
|
|
||||||
RTC_PIXEL_FORMAT,
|
|
||||||
static_cast<GLsizei>(width),
|
|
||||||
static_cast<GLsizei>(height),
|
|
||||||
0,
|
|
||||||
RTC_PIXEL_FORMAT,
|
|
||||||
GL_UNSIGNED_BYTE,
|
|
||||||
uploadPlane);
|
|
||||||
}
|
|
||||||
|
|
||||||
- (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame {
|
|
||||||
GLint textureOffset = _currentTextureSet * 3;
|
|
||||||
NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");
|
|
||||||
|
|
||||||
[self uploadPlane:frame.yPlane
|
|
||||||
sampler:_ySampler
|
|
||||||
offset:textureOffset
|
|
||||||
width:frame.width
|
|
||||||
height:frame.height
|
|
||||||
stride:frame.yPitch];
|
|
||||||
|
|
||||||
[self uploadPlane:frame.uPlane
|
|
||||||
sampler:_uSampler
|
|
||||||
offset:textureOffset + 1
|
|
||||||
width:frame.chromaWidth
|
|
||||||
height:frame.chromaHeight
|
|
||||||
stride:frame.uPitch];
|
|
||||||
|
|
||||||
[self uploadPlane:frame.vPlane
|
|
||||||
sampler:_vSampler
|
|
||||||
offset:textureOffset + 2
|
|
||||||
width:frame.chromaWidth
|
|
||||||
height:frame.chromaHeight
|
|
||||||
stride:frame.vPitch];
|
|
||||||
|
|
||||||
_currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
|
|
||||||
return YES;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (BOOL)setupVertices {
|
|
||||||
#if !TARGET_OS_IPHONE
|
|
||||||
NSAssert(!_vertexArray, @"vertex array already set up");
|
|
||||||
glGenVertexArrays(1, &_vertexArray);
|
|
||||||
if (!_vertexArray) {
|
|
||||||
return NO;
|
|
||||||
}
|
|
||||||
glBindVertexArray(_vertexArray);
|
|
||||||
#endif
|
|
||||||
NSAssert(!_vertexBuffer, @"vertex buffer already set up");
|
|
||||||
glGenBuffers(1, &_vertexBuffer);
|
|
||||||
if (!_vertexBuffer) {
|
|
||||||
#if !TARGET_OS_IPHONE
|
|
||||||
glDeleteVertexArrays(1, &_vertexArray);
|
|
||||||
_vertexArray = 0;
|
|
||||||
#endif
|
|
||||||
return NO;
|
|
||||||
}
|
|
||||||
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
|
|
||||||
glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW);
|
|
||||||
|
|
||||||
// Read position attribute from |gVertices| with size of 2 and stride of 4
|
|
||||||
// beginning at the start of the array. The last argument indicates offset
|
|
||||||
// of data within |gVertices| as supplied to the vertex buffer.
|
|
||||||
glVertexAttribPointer(
|
|
||||||
_position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
|
|
||||||
glEnableVertexAttribArray(_position);
|
|
||||||
|
|
||||||
// Read texcoord attribute from |gVertices| with size of 2 and stride of 4
|
|
||||||
// beginning at the first texcoord in the array. The last argument indicates
|
|
||||||
// offset of data within |gVertices| as supplied to the vertex buffer.
|
|
||||||
glVertexAttribPointer(_texcoord,
|
|
||||||
2,
|
|
||||||
GL_FLOAT,
|
|
||||||
GL_FALSE,
|
|
||||||
4 * sizeof(GLfloat),
|
|
||||||
(void *)(2 * sizeof(GLfloat)));
|
|
||||||
glEnableVertexAttribArray(_texcoord);
|
|
||||||
|
|
||||||
return YES;
|
|
||||||
}
|
|
||||||
|
|
||||||
@end
|
@end
|
||||||
|
|||||||
27
webrtc/sdk/objc/Framework/Classes/RTCShader+Private.h
Normal file
27
webrtc/sdk/objc/Framework/Classes/RTCShader+Private.h
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||||
|
*
|
||||||
|
* Use of this source code is governed by a BSD-style license
|
||||||
|
* that can be found in the LICENSE file in the root of the source
|
||||||
|
* tree. An additional intellectual property rights grant can be found
|
||||||
|
* in the file PATENTS. All contributing project authors may
|
||||||
|
* be found in the AUTHORS file in the root of the source tree.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#import "RTCShader.h"
|
||||||
|
|
||||||
|
#import "WebRTC/RTCMacros.h"
|
||||||
|
|
||||||
|
#if TARGET_OS_IPHONE
|
||||||
|
#import <OpenGLES/ES3/gl.h>
|
||||||
|
#else
|
||||||
|
#import <OpenGL/gl3.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
RTC_EXTERN const char kRTCVertexShaderSource[];
|
||||||
|
|
||||||
|
RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar *source);
|
||||||
|
RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader);
|
||||||
|
RTC_EXTERN GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]);
|
||||||
|
RTC_EXTERN BOOL RTCSetupVerticesForProgram(
|
||||||
|
GLuint program, GLuint* vertexBuffer, GLuint* vertexArray);
|
||||||
43
webrtc/sdk/objc/Framework/Classes/RTCShader.h
Normal file
43
webrtc/sdk/objc/Framework/Classes/RTCShader.h
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||||
|
*
|
||||||
|
* Use of this source code is governed by a BSD-style license
|
||||||
|
* that can be found in the LICENSE file in the root of the source
|
||||||
|
* tree. An additional intellectual property rights grant can be found
|
||||||
|
* in the file PATENTS. All contributing project authors may
|
||||||
|
* be found in the AUTHORS file in the root of the source tree.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#import "RTCOpenGLDefines.h"
|
||||||
|
|
||||||
|
@class RTCVideoFrame;
|
||||||
|
|
||||||
|
@protocol RTCShader <NSObject>
|
||||||
|
|
||||||
|
- (BOOL)drawFrame:(RTCVideoFrame *)frame;
|
||||||
|
|
||||||
|
@end
|
||||||
|
|
||||||
|
// Shader for non-native I420 frames.
|
||||||
|
@interface RTCI420Shader : NSObject <RTCShader>
|
||||||
|
|
||||||
|
- (instancetype)init NS_UNAVAILABLE;
|
||||||
|
- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
|
||||||
|
- (BOOL)drawFrame:(RTCVideoFrame *)frame;
|
||||||
|
|
||||||
|
@end
|
||||||
|
|
||||||
|
// Native CVPixelBufferRef rendering is only supported on iPhone because it
|
||||||
|
// depends on CVOpenGLESTextureCacheCreate.
|
||||||
|
#if TARGET_OS_IPHONE
|
||||||
|
|
||||||
|
// Shader for native NV12 frames.
|
||||||
|
@interface RTCNativeNV12Shader : NSObject <RTCShader>
|
||||||
|
|
||||||
|
- (instancetype)init NS_UNAVAILABLE;
|
||||||
|
- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
|
||||||
|
- (BOOL)drawFrame:(RTCVideoFrame *)frame;
|
||||||
|
|
||||||
|
@end
|
||||||
|
|
||||||
|
#endif // TARGET_OS_IPHONE
|
||||||
152
webrtc/sdk/objc/Framework/Classes/RTCShader.mm
Normal file
152
webrtc/sdk/objc/Framework/Classes/RTCShader.mm
Normal file
@ -0,0 +1,152 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||||
|
*
|
||||||
|
* Use of this source code is governed by a BSD-style license
|
||||||
|
* that can be found in the LICENSE file in the root of the source
|
||||||
|
* tree. An additional intellectual property rights grant can be found
|
||||||
|
* in the file PATENTS. All contributing project authors may
|
||||||
|
* be found in the AUTHORS file in the root of the source tree.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#import "RTCShader.h"
|
||||||
|
|
||||||
|
#include <memory>
|
||||||
|
|
||||||
|
#import "RTCShader+Private.h"
|
||||||
|
|
||||||
|
#include "webrtc/base/checks.h"
|
||||||
|
#include "webrtc/base/logging.h"
|
||||||
|
|
||||||
|
// Vertex shader doesn't do anything except pass coordinates through.
|
||||||
|
const char kRTCVertexShaderSource[] =
|
||||||
|
SHADER_VERSION
|
||||||
|
VERTEX_SHADER_IN " vec2 position;\n"
|
||||||
|
VERTEX_SHADER_IN " vec2 texcoord;\n"
|
||||||
|
VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
|
||||||
|
"void main() {\n"
|
||||||
|
" gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
|
||||||
|
" v_texcoord = texcoord;\n"
|
||||||
|
"}\n";
|
||||||
|
|
||||||
|
// When modelview and projection matrices are identity (default) the world is
|
||||||
|
// contained in the square around origin with unit size 2. Drawing to these
|
||||||
|
// coordinates is equivalent to drawing to the entire screen. The texture is
|
||||||
|
// stretched over that square using texture coordinates (u, v) that range
|
||||||
|
// from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically
|
||||||
|
// here because the incoming frame has origin in upper left hand corner but
|
||||||
|
// OpenGL expects origin in bottom left corner.
|
||||||
|
static const GLfloat gVertices[] = {
|
||||||
|
// X, Y, U, V.
|
||||||
|
-1, -1, 0, 1, // Bottom left.
|
||||||
|
1, -1, 1, 1, // Bottom right.
|
||||||
|
1, 1, 1, 0, // Top right.
|
||||||
|
-1, 1, 0, 0, // Top left.
|
||||||
|
};
|
||||||
|
|
||||||
|
// Compiles a shader of the given |type| with GLSL source |source| and returns
|
||||||
|
// the shader handle or 0 on error.
|
||||||
|
GLuint RTCCreateShader(GLenum type, const GLchar *source) {
|
||||||
|
GLuint shader = glCreateShader(type);
|
||||||
|
if (!shader) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
glShaderSource(shader, 1, &source, NULL);
|
||||||
|
glCompileShader(shader);
|
||||||
|
GLint compileStatus = GL_FALSE;
|
||||||
|
glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
|
||||||
|
if (compileStatus == GL_FALSE) {
|
||||||
|
GLint logLength = 0;
|
||||||
|
// The null termination character is included in the returned log length.
|
||||||
|
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
|
||||||
|
if (logLength > 0) {
|
||||||
|
std::unique_ptr<char[]> compileLog(new char[logLength]);
|
||||||
|
// The returned string is null terminated.
|
||||||
|
glGetShaderInfoLog(shader, logLength, NULL, compileLog.get());
|
||||||
|
LOG(LS_ERROR) << "Shader compile error: " << compileLog.get();
|
||||||
|
}
|
||||||
|
glDeleteShader(shader);
|
||||||
|
shader = 0;
|
||||||
|
}
|
||||||
|
return shader;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Links a shader program with the given vertex and fragment shaders and
|
||||||
|
// returns the program handle or 0 on error.
|
||||||
|
GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) {
|
||||||
|
if (vertexShader == 0 || fragmentShader == 0) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
GLuint program = glCreateProgram();
|
||||||
|
if (!program) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
glAttachShader(program, vertexShader);
|
||||||
|
glAttachShader(program, fragmentShader);
|
||||||
|
glLinkProgram(program);
|
||||||
|
GLint linkStatus = GL_FALSE;
|
||||||
|
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
|
||||||
|
if (linkStatus == GL_FALSE) {
|
||||||
|
glDeleteProgram(program);
|
||||||
|
program = 0;
|
||||||
|
}
|
||||||
|
return program;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates and links a shader program with the given fragment shader source and
|
||||||
|
// a plain vertex shader. Returns the program handle or 0 on error.
|
||||||
|
GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) {
|
||||||
|
GLuint vertexShader = RTCCreateShader(GL_VERTEX_SHADER, kRTCVertexShaderSource);
|
||||||
|
RTC_CHECK(vertexShader) << "failed to create vertex shader";
|
||||||
|
GLuint fragmentShader =
|
||||||
|
RTCCreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource);
|
||||||
|
RTC_CHECK(fragmentShader) << "failed to create fragment shader";
|
||||||
|
GLuint program = RTCCreateProgram(vertexShader, fragmentShader);
|
||||||
|
// Shaders are created only to generate program.
|
||||||
|
if (vertexShader) {
|
||||||
|
glDeleteShader(vertexShader);
|
||||||
|
}
|
||||||
|
if (fragmentShader) {
|
||||||
|
glDeleteShader(fragmentShader);
|
||||||
|
}
|
||||||
|
return program;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set vertex shader variables 'position' and 'texcoord' in |program| to the
|
||||||
|
// |gVertices| array above. It will use |vertexBuffer| and |vertexArray| to
|
||||||
|
// store the vertex data.
|
||||||
|
BOOL RTCSetupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* vertexArray) {
|
||||||
|
GLint position = glGetAttribLocation(program, "position");
|
||||||
|
GLint texcoord = glGetAttribLocation(program, "texcoord");
|
||||||
|
if (position < 0 || texcoord < 0) {
|
||||||
|
return NO;
|
||||||
|
}
|
||||||
|
#if !TARGET_OS_IPHONE
|
||||||
|
glGenVertexArrays(1, vertexArray);
|
||||||
|
if (*vertexArray == 0) {
|
||||||
|
return NO;
|
||||||
|
}
|
||||||
|
glBindVertexArray(*vertexArray);
|
||||||
|
#endif
|
||||||
|
glGenBuffers(1, vertexBuffer);
|
||||||
|
if (*vertexBuffer == 0) {
|
||||||
|
return NO;
|
||||||
|
}
|
||||||
|
glBindBuffer(GL_ARRAY_BUFFER, *vertexBuffer);
|
||||||
|
glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW);
|
||||||
|
|
||||||
|
// Read position attribute from |gVertices| with size of 2 and stride of 4
|
||||||
|
// beginning at the start of the array. The last argument indicates offset
|
||||||
|
// of data within |gVertices| as supplied to the vertex buffer.
|
||||||
|
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat),
|
||||||
|
(void *)0);
|
||||||
|
glEnableVertexAttribArray(position);
|
||||||
|
|
||||||
|
// Read texcoord attribute from |gVertices| with size of 2 and stride of 4
|
||||||
|
// beginning at the first texcoord in the array. The last argument indicates
|
||||||
|
// offset of data within |gVertices| as supplied to the vertex buffer.
|
||||||
|
glVertexAttribPointer(texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat),
|
||||||
|
(void *)(2 * sizeof(GLfloat)));
|
||||||
|
glEnableVertexAttribArray(texcoord);
|
||||||
|
|
||||||
|
return YES;
|
||||||
|
}
|
||||||
@ -106,16 +106,17 @@
|
|||||||
],
|
],
|
||||||
}, # link_settings
|
}, # link_settings
|
||||||
'sources': [
|
'sources': [
|
||||||
'objc/Framework/Classes/RTCAudioTrack+Private.h',
|
|
||||||
'objc/Framework/Classes/RTCAudioTrack.mm',
|
|
||||||
'objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h',
|
'objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h',
|
||||||
'objc/Framework/Classes/RTCAVFoundationVideoSource.mm',
|
'objc/Framework/Classes/RTCAVFoundationVideoSource.mm',
|
||||||
|
'objc/Framework/Classes/RTCAudioTrack+Private.h',
|
||||||
|
'objc/Framework/Classes/RTCAudioTrack.mm',
|
||||||
'objc/Framework/Classes/RTCConfiguration+Private.h',
|
'objc/Framework/Classes/RTCConfiguration+Private.h',
|
||||||
'objc/Framework/Classes/RTCConfiguration.mm',
|
'objc/Framework/Classes/RTCConfiguration.mm',
|
||||||
'objc/Framework/Classes/RTCDataChannel+Private.h',
|
'objc/Framework/Classes/RTCDataChannel+Private.h',
|
||||||
'objc/Framework/Classes/RTCDataChannel.mm',
|
'objc/Framework/Classes/RTCDataChannel.mm',
|
||||||
'objc/Framework/Classes/RTCDataChannelConfiguration+Private.h',
|
'objc/Framework/Classes/RTCDataChannelConfiguration+Private.h',
|
||||||
'objc/Framework/Classes/RTCDataChannelConfiguration.mm',
|
'objc/Framework/Classes/RTCDataChannelConfiguration.mm',
|
||||||
|
'objc/Framework/Classes/RTCI420Shader.mm',
|
||||||
'objc/Framework/Classes/RTCIceCandidate+Private.h',
|
'objc/Framework/Classes/RTCIceCandidate+Private.h',
|
||||||
'objc/Framework/Classes/RTCIceCandidate.mm',
|
'objc/Framework/Classes/RTCIceCandidate.mm',
|
||||||
'objc/Framework/Classes/RTCIceServer+Private.h',
|
'objc/Framework/Classes/RTCIceServer+Private.h',
|
||||||
@ -126,6 +127,7 @@
|
|||||||
'objc/Framework/Classes/RTCMediaStream.mm',
|
'objc/Framework/Classes/RTCMediaStream.mm',
|
||||||
'objc/Framework/Classes/RTCMediaStreamTrack+Private.h',
|
'objc/Framework/Classes/RTCMediaStreamTrack+Private.h',
|
||||||
'objc/Framework/Classes/RTCMediaStreamTrack.mm',
|
'objc/Framework/Classes/RTCMediaStreamTrack.mm',
|
||||||
|
'objc/Framework/Classes/RTCOpenGLDefines.h',
|
||||||
'objc/Framework/Classes/RTCOpenGLVideoRenderer.h',
|
'objc/Framework/Classes/RTCOpenGLVideoRenderer.h',
|
||||||
'objc/Framework/Classes/RTCOpenGLVideoRenderer.mm',
|
'objc/Framework/Classes/RTCOpenGLVideoRenderer.mm',
|
||||||
'objc/Framework/Classes/RTCPeerConnection+DataChannel.mm',
|
'objc/Framework/Classes/RTCPeerConnection+DataChannel.mm',
|
||||||
@ -146,6 +148,9 @@
|
|||||||
'objc/Framework/Classes/RTCRtpSender.mm',
|
'objc/Framework/Classes/RTCRtpSender.mm',
|
||||||
'objc/Framework/Classes/RTCSessionDescription+Private.h',
|
'objc/Framework/Classes/RTCSessionDescription+Private.h',
|
||||||
'objc/Framework/Classes/RTCSessionDescription.mm',
|
'objc/Framework/Classes/RTCSessionDescription.mm',
|
||||||
|
'objc/Framework/Classes/RTCShader+Private.h',
|
||||||
|
'objc/Framework/Classes/RTCShader.h',
|
||||||
|
'objc/Framework/Classes/RTCShader.mm',
|
||||||
'objc/Framework/Classes/RTCStatsReport+Private.h',
|
'objc/Framework/Classes/RTCStatsReport+Private.h',
|
||||||
'objc/Framework/Classes/RTCStatsReport.mm',
|
'objc/Framework/Classes/RTCStatsReport.mm',
|
||||||
'objc/Framework/Classes/RTCVideoFrame+Private.h',
|
'objc/Framework/Classes/RTCVideoFrame+Private.h',
|
||||||
@ -159,8 +164,8 @@
|
|||||||
'objc/Framework/Classes/RTCVideoTrack.mm',
|
'objc/Framework/Classes/RTCVideoTrack.mm',
|
||||||
'objc/Framework/Classes/avfoundationvideocapturer.h',
|
'objc/Framework/Classes/avfoundationvideocapturer.h',
|
||||||
'objc/Framework/Classes/avfoundationvideocapturer.mm',
|
'objc/Framework/Classes/avfoundationvideocapturer.mm',
|
||||||
'objc/Framework/Headers/WebRTC/RTCAudioTrack.h',
|
|
||||||
'objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h',
|
'objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h',
|
||||||
|
'objc/Framework/Headers/WebRTC/RTCAudioTrack.h',
|
||||||
'objc/Framework/Headers/WebRTC/RTCConfiguration.h',
|
'objc/Framework/Headers/WebRTC/RTCConfiguration.h',
|
||||||
'objc/Framework/Headers/WebRTC/RTCDataChannel.h',
|
'objc/Framework/Headers/WebRTC/RTCDataChannel.h',
|
||||||
'objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h',
|
'objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h',
|
||||||
@ -187,6 +192,7 @@
|
|||||||
['OS=="ios"', {
|
['OS=="ios"', {
|
||||||
'sources': [
|
'sources': [
|
||||||
'objc/Framework/Classes/RTCEAGLVideoView.m',
|
'objc/Framework/Classes/RTCEAGLVideoView.m',
|
||||||
|
'objc/Framework/Classes/RTCNativeNV12Shader.mm',
|
||||||
'objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h',
|
'objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h',
|
||||||
],
|
],
|
||||||
'link_settings': {
|
'link_settings': {
|
||||||
|
|||||||
Reference in New Issue
Block a user