Obj-C SDK Cleanup

This CL separates the files under sdk/objc into logical directories, replacing
the previous file layout under Framework/.

A long-term goal is to set up a system that generates the files under
sdk/objc/api (the PeerConnection API wrappers) from the C++ code. In the
shorter term, the goal is to abstract shared concepts out of these classes in
order to make them as uniform as possible.

The separation into base/, components/, and helpers/ differentiates between
the base layer's common protocols, various utilities, and the actual
platform-specific components.

The old directory layout, which mirrored a framework's internal layout, is
unnecessary, since that layout is generated by the framework target at build
time.

Bug: webrtc:9627
Change-Id: Ib084fd83f050ae980649ca99e841f4fb0580bd8f
Reviewed-on: https://webrtc-review.googlesource.com/94142
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24493}
Author: Anders Carlsson
Date: 2018-08-30 09:30:29 +02:00
Committed by: Commit Bot
Parent: 9ea5765f78
Commit: 7bca8ca4e2
470 changed files with 7255 additions and 5258 deletions

@@ -1,23 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
NS_ASSUME_NONNULL_BEGIN
@interface AVCaptureSession (DevicePosition)
// Check the image's EXIF for the camera the image came from.
+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
NS_ASSUME_NONNULL_END

@@ -1,51 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "AVCaptureSession+DevicePosition.h"
BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) {
return CFStringFindWithOptions(theString,
stringToFind,
CFRangeMake(0, CFStringGetLength(theString)),
kCFCompareCaseInsensitive,
nil);
}
@implementation AVCaptureSession (DevicePosition)
+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer {
// Check the image's EXIF for the camera the image came from.
AVCaptureDevicePosition cameraPosition = AVCaptureDevicePositionUnspecified;
CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(
kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
if (attachments) {
int size = CFDictionaryGetCount(attachments);
if (size > 0) {
CFDictionaryRef cfExifDictVal = nil;
if (CFDictionaryGetValueIfPresent(
attachments, (const void *)CFSTR("{Exif}"), (const void **)&cfExifDictVal)) {
CFStringRef cfLensModelStrVal;
if (CFDictionaryGetValueIfPresent(cfExifDictVal,
(const void *)CFSTR("LensModel"),
(const void **)&cfLensModelStrVal)) {
if (CFStringContainsString(cfLensModelStrVal, CFSTR("front"))) {
cameraPosition = AVCaptureDevicePositionFront;
} else if (CFStringContainsString(cfLensModelStrVal, CFSTR("back"))) {
cameraPosition = AVCaptureDevicePositionBack;
}
}
}
}
CFRelease(attachments);
}
return cameraPosition;
}
@end
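
For context, a minimal sketch of how this category might be consumed from an
AVCaptureVideoDataOutput sample-buffer delegate; the delegate class and the
capture setup are assumptions, not part of this CL:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  // Derive the camera position from the frame's own EXIF metadata rather
  // than from the configured capture device.
  AVCaptureDevicePosition position =
      [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
  if (position == AVCaptureDevicePositionFront) {
    // E.g. mirror the local preview for front-facing frames.
  }
}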

@@ -1,346 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "third_party/libyuv/include/libyuv.h"
#if !defined(NDEBUG) && defined(WEBRTC_IOS)
#import <UIKit/UIKit.h>
#import <VideoToolbox/VideoToolbox.h>
#endif
@implementation RTCCVPixelBuffer {
int _width;
int _height;
int _bufferWidth;
int _bufferHeight;
int _cropWidth;
int _cropHeight;
}
@synthesize pixelBuffer = _pixelBuffer;
@synthesize cropX = _cropX;
@synthesize cropY = _cropY;
@synthesize cropWidth = _cropWidth;
@synthesize cropHeight = _cropHeight;
+ (NSSet<NSNumber*>*)supportedPixelFormats {
return [NSSet setWithObjects:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
@(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
@(kCVPixelFormatType_32BGRA),
@(kCVPixelFormatType_32ARGB),
nil];
}
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer {
return [self initWithPixelBuffer:pixelBuffer
adaptedWidth:CVPixelBufferGetWidth(pixelBuffer)
adaptedHeight:CVPixelBufferGetHeight(pixelBuffer)
cropWidth:CVPixelBufferGetWidth(pixelBuffer)
cropHeight:CVPixelBufferGetHeight(pixelBuffer)
cropX:0
cropY:0];
}
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
adaptedWidth:(int)adaptedWidth
adaptedHeight:(int)adaptedHeight
cropWidth:(int)cropWidth
cropHeight:(int)cropHeight
cropX:(int)cropX
cropY:(int)cropY {
if (self = [super init]) {
_width = adaptedWidth;
_height = adaptedHeight;
_pixelBuffer = pixelBuffer;
_bufferWidth = CVPixelBufferGetWidth(_pixelBuffer);
_bufferHeight = CVPixelBufferGetHeight(_pixelBuffer);
_cropWidth = cropWidth;
_cropHeight = cropHeight;
// Can only crop at even pixels.
_cropX = cropX & ~1;
_cropY = cropY & ~1;
CVBufferRetain(_pixelBuffer);
}
return self;
}
- (void)dealloc {
CVBufferRelease(_pixelBuffer);
}
- (int)width {
return _width;
}
- (int)height {
return _height;
}
- (BOOL)requiresCropping {
return _cropWidth != _bufferWidth || _cropHeight != _bufferHeight;
}
- (BOOL)requiresScalingToWidth:(int)width height:(int)height {
return _cropWidth != width || _cropHeight != height;
}
- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height {
const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
switch (srcPixelFormat) {
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
int srcChromaWidth = (_cropWidth + 1) / 2;
int srcChromaHeight = (_cropHeight + 1) / 2;
int dstChromaWidth = (width + 1) / 2;
int dstChromaHeight = (height + 1) / 2;
return srcChromaWidth * srcChromaHeight * 2 + dstChromaWidth * dstChromaHeight * 2;
}
case kCVPixelFormatType_32BGRA:
case kCVPixelFormatType_32ARGB: {
return 0; // Scaling RGBA frames does not require a temporary buffer.
}
}
RTC_NOTREACHED() << "Unsupported pixel format.";
return 0;
}
- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
withTempBuffer:(nullable uint8_t*)tmpBuffer {
const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
const OSType dstPixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer);
switch (srcPixelFormat) {
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
size_t dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
size_t dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
if (dstWidth > 0 && dstHeight > 0) {
RTC_DCHECK(dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
if ([self requiresScalingToWidth:dstWidth height:dstHeight]) {
RTC_DCHECK(tmpBuffer);
}
[self cropAndScaleNV12To:outputPixelBuffer withTempBuffer:tmpBuffer];
}
break;
}
case kCVPixelFormatType_32BGRA:
case kCVPixelFormatType_32ARGB: {
RTC_DCHECK(srcPixelFormat == dstPixelFormat);
[self cropAndScaleARGBTo:outputPixelBuffer];
break;
}
default: { RTC_NOTREACHED() << "Unsupported pixel format."; }
}
return YES;
}
- (id<RTCI420Buffer>)toI420 {
const OSType pixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
RTCMutableI420Buffer* i420Buffer =
[[RTCMutableI420Buffer alloc] initWithWidth:[self width] height:[self height]];
switch (pixelFormat) {
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
const uint8_t* srcY =
static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
const uint8_t* srcUV =
static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
// Crop just by modifying pointers.
srcY += srcYStride * _cropY + _cropX;
srcUV += srcUVStride * (_cropY / 2) + _cropX;
// TODO(magjed): Use a frame buffer pool.
webrtc::NV12ToI420Scaler nv12ToI420Scaler;
nv12ToI420Scaler.NV12ToI420Scale(srcY,
srcYStride,
srcUV,
srcUVStride,
_cropWidth,
_cropHeight,
i420Buffer.mutableDataY,
i420Buffer.strideY,
i420Buffer.mutableDataU,
i420Buffer.strideU,
i420Buffer.mutableDataV,
i420Buffer.strideV,
i420Buffer.width,
i420Buffer.height);
break;
}
case kCVPixelFormatType_32BGRA:
case kCVPixelFormatType_32ARGB: {
CVPixelBufferRef scaledPixelBuffer = NULL;
CVPixelBufferRef sourcePixelBuffer = NULL;
if ([self requiresCropping] ||
[self requiresScalingToWidth:i420Buffer.width height:i420Buffer.height]) {
CVPixelBufferCreate(
NULL, i420Buffer.width, i420Buffer.height, pixelFormat, NULL, &scaledPixelBuffer);
[self cropAndScaleTo:scaledPixelBuffer withTempBuffer:NULL];
CVPixelBufferLockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
sourcePixelBuffer = scaledPixelBuffer;
} else {
sourcePixelBuffer = _pixelBuffer;
}
const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(sourcePixelBuffer));
const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(sourcePixelBuffer);
if (pixelFormat == kCVPixelFormatType_32BGRA) {
// Corresponds to libyuv::FOURCC_ARGB
libyuv::ARGBToI420(src,
bytesPerRow,
i420Buffer.mutableDataY,
i420Buffer.strideY,
i420Buffer.mutableDataU,
i420Buffer.strideU,
i420Buffer.mutableDataV,
i420Buffer.strideV,
i420Buffer.width,
i420Buffer.height);
} else if (pixelFormat == kCVPixelFormatType_32ARGB) {
// Corresponds to libyuv::FOURCC_BGRA
libyuv::BGRAToI420(src,
bytesPerRow,
i420Buffer.mutableDataY,
i420Buffer.strideY,
i420Buffer.mutableDataU,
i420Buffer.strideU,
i420Buffer.mutableDataV,
i420Buffer.strideV,
i420Buffer.width,
i420Buffer.height);
}
if (scaledPixelBuffer) {
CVPixelBufferUnlockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
CVBufferRelease(scaledPixelBuffer);
}
break;
}
default: { RTC_NOTREACHED() << "Unsupported pixel format."; }
}
CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
return i420Buffer;
}
#pragma mark - Debugging
#if !defined(NDEBUG) && defined(WEBRTC_IOS)
- (id)debugQuickLookObject {
CGImageRef cgImage;
VTCreateCGImageFromCVPixelBuffer(_pixelBuffer, NULL, &cgImage);
UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationUp];
CGImageRelease(cgImage);
return image;
}
#endif
#pragma mark - Private
- (void)cropAndScaleNV12To:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t*)tmpBuffer {
// Prepare output pointers.
CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
if (cvRet != kCVReturnSuccess) {
RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
}
const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
uint8_t* dstY =
reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0));
const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
uint8_t* dstUV =
reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1));
const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
// Prepare source pointers.
CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
const uint8_t* srcY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
const uint8_t* srcUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
// Crop just by modifying pointers.
srcY += srcYStride * _cropY + _cropX;
srcUV += srcUVStride * (_cropY / 2) + _cropX;
webrtc::NV12Scale(tmpBuffer,
srcY,
srcYStride,
srcUV,
srcUVStride,
_cropWidth,
_cropHeight,
dstY,
dstYStride,
dstUV,
dstUVStride,
dstWidth,
dstHeight);
CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
}
- (void)cropAndScaleARGBTo:(CVPixelBufferRef)outputPixelBuffer {
// Prepare output pointers.
CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
if (cvRet != kCVReturnSuccess) {
RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
}
const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
uint8_t* dst = reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddress(outputPixelBuffer));
const int dstStride = CVPixelBufferGetBytesPerRow(outputPixelBuffer);
// Prepare source pointers.
CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(_pixelBuffer));
const int srcStride = CVPixelBufferGetBytesPerRow(_pixelBuffer);
// Crop just by modifying pointers. Need to ensure that src pointer points to a byte corresponding
// to the start of a new pixel (byte with B for BGRA) so that libyuv scales correctly.
const int bytesPerPixel = 4;
src += srcStride * _cropY + (_cropX * bytesPerPixel);
// kCVPixelFormatType_32BGRA corresponds to libyuv::FOURCC_ARGB
libyuv::ARGBScale(src,
srcStride,
_cropWidth,
_cropHeight,
dst,
dstStride,
dstWidth,
dstHeight,
libyuv::kFilterBox);
CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
}
@end
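
A rough usage sketch for the crop-and-scale path above, assuming the caller
already owns a 1280x720 source and a 640x360 destination CVPixelBufferRef
(srcPixelBuffer and dstPixelBuffer are hypothetical names):

RTCCVPixelBuffer *rtcBuffer =
    [[RTCCVPixelBuffer alloc] initWithPixelBuffer:srcPixelBuffer
                                     adaptedWidth:640
                                    adaptedHeight:360
                                        cropWidth:1280
                                       cropHeight:720
                                            cropX:0
                                            cropY:0];
// The NV12 formats need scratch memory for scaling; RGB formats report 0
// and accept NULL below.
int tmpSize = [rtcBuffer bufferSizeForCroppingAndScalingToWidth:640 height:360];
std::vector<uint8_t> tmpBuffer(tmpSize);
[rtcBuffer cropAndScaleTo:dstPixelBuffer
           withTempBuffer:tmpSize > 0 ? tmpBuffer.data() : NULL];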

@@ -8,16 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoViewShading.h"
NS_ASSUME_NONNULL_BEGIN
/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView and
* RTCEAGLVideoView if no external shader is specified. This shader will render
* the video in a rectangle without any color or geometric transformations.
*/
@interface RTCDefaultShader : NSObject<RTCVideoViewShading>
@end
NS_ASSUME_NONNULL_END
#import "components/renderer/opengl/RTCDefaultShader.h"

@@ -1,207 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCDefaultShader.h"
#if TARGET_OS_IPHONE
#import <OpenGLES/ES3/gl.h>
#else
#import <OpenGL/gl3.h>
#endif
#import "RTCOpenGLDefines.h"
#import "RTCShader.h"
#import "WebRTC/RTCLogging.h"
#include "absl/types/optional.h"
static const int kYTextureUnit = 0;
static const int kUTextureUnit = 1;
static const int kVTextureUnit = 2;
static const int kUvTextureUnit = 1;
// Fragment shader converts YUV values from input textures into a final RGB
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
static const char kI420FragmentShaderSource[] =
SHADER_VERSION
"precision highp float;"
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
"uniform lowp sampler2D s_textureY;\n"
"uniform lowp sampler2D s_textureU;\n"
"uniform lowp sampler2D s_textureV;\n"
FRAGMENT_SHADER_OUT
"void main() {\n"
" float y, u, v, r, g, b;\n"
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
" u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
" v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
" u = u - 0.5;\n"
" v = v - 0.5;\n"
" r = y + 1.403 * v;\n"
" g = y - 0.344 * u - 0.714 * v;\n"
" b = y + 1.770 * u;\n"
" " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
" }\n";
static const char kNV12FragmentShaderSource[] =
SHADER_VERSION
"precision mediump float;"
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
"uniform lowp sampler2D s_textureY;\n"
"uniform lowp sampler2D s_textureUV;\n"
FRAGMENT_SHADER_OUT
"void main() {\n"
" mediump float y;\n"
" mediump vec2 uv;\n"
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
" uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n"
" vec2(0.5, 0.5);\n"
" " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n"
" y - 0.344 * uv.x - 0.714 * uv.y,\n"
" y + 1.770 * uv.x,\n"
" 1.0);\n"
" }\n";
@implementation RTCDefaultShader {
GLuint _vertexBuffer;
GLuint _vertexArray;
// Store current rotation and only upload new vertex data when rotation changes.
absl::optional<RTCVideoRotation> _currentRotation;
GLuint _i420Program;
GLuint _nv12Program;
}
- (void)dealloc {
glDeleteProgram(_i420Program);
glDeleteProgram(_nv12Program);
glDeleteBuffers(1, &_vertexBuffer);
glDeleteVertexArrays(1, &_vertexArray);
}
- (BOOL)createAndSetupI420Program {
NSAssert(!_i420Program, @"I420 program already created");
_i420Program = RTCCreateProgramFromFragmentSource(kI420FragmentShaderSource);
if (!_i420Program) {
return NO;
}
GLint ySampler = glGetUniformLocation(_i420Program, "s_textureY");
GLint uSampler = glGetUniformLocation(_i420Program, "s_textureU");
GLint vSampler = glGetUniformLocation(_i420Program, "s_textureV");
if (ySampler < 0 || uSampler < 0 || vSampler < 0) {
RTCLog(@"Failed to get uniform variable locations in I420 shader");
glDeleteProgram(_i420Program);
_i420Program = 0;
return NO;
}
glUseProgram(_i420Program);
glUniform1i(ySampler, kYTextureUnit);
glUniform1i(uSampler, kUTextureUnit);
glUniform1i(vSampler, kVTextureUnit);
return YES;
}
- (BOOL)createAndSetupNV12Program {
NSAssert(!_nv12Program, @"NV12 program already created");
_nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource);
if (!_nv12Program) {
return NO;
}
GLint ySampler = glGetUniformLocation(_nv12Program, "s_textureY");
GLint uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV");
if (ySampler < 0 || uvSampler < 0) {
RTCLog(@"Failed to get uniform variable locations in NV12 shader");
glDeleteProgram(_nv12Program);
_nv12Program = 0;
return NO;
}
glUseProgram(_nv12Program);
glUniform1i(ySampler, kYTextureUnit);
glUniform1i(uvSampler, kUvTextureUnit);
return YES;
}
- (BOOL)prepareVertexBufferWithRotation:(RTCVideoRotation)rotation {
if (!_vertexBuffer && !RTCCreateVertexBuffer(&_vertexBuffer, &_vertexArray)) {
RTCLog(@"Failed to setup vertex buffer");
return NO;
}
#if !TARGET_OS_IPHONE
glBindVertexArray(_vertexArray);
#endif
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
if (!_currentRotation || rotation != *_currentRotation) {
_currentRotation = absl::optional<RTCVideoRotation>(rotation);
RTCSetVertexData(*_currentRotation);
}
return YES;
}
- (void)applyShadingForFrameWithWidth:(int)width
height:(int)height
rotation:(RTCVideoRotation)rotation
yPlane:(GLuint)yPlane
uPlane:(GLuint)uPlane
vPlane:(GLuint)vPlane {
if (![self prepareVertexBufferWithRotation:rotation]) {
return;
}
if (!_i420Program && ![self createAndSetupI420Program]) {
RTCLog(@"Failed to setup I420 program");
return;
}
glUseProgram(_i420Program);
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kYTextureUnit));
glBindTexture(GL_TEXTURE_2D, yPlane);
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kUTextureUnit));
glBindTexture(GL_TEXTURE_2D, uPlane);
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kVTextureUnit));
glBindTexture(GL_TEXTURE_2D, vPlane);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
}
- (void)applyShadingForFrameWithWidth:(int)width
height:(int)height
rotation:(RTCVideoRotation)rotation
yPlane:(GLuint)yPlane
uvPlane:(GLuint)uvPlane {
if (![self prepareVertexBufferWithRotation:rotation]) {
return;
}
if (!_nv12Program && ![self createAndSetupNV12Program]) {
RTCLog(@"Failed to setup NV12 shader");
return;
}
glUseProgram(_nv12Program);
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kYTextureUnit));
glBindTexture(GL_TEXTURE_2D, yPlane);
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kUvTextureUnit));
glBindTexture(GL_TEXTURE_2D, uvPlane);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
}
@end

@@ -1,25 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "api/video/i420_buffer.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCI420Buffer ()
/** Initialize an RTCI420Buffer with its backing I420BufferInterface. */
- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer;
- (rtc::scoped_refptr<webrtc::I420BufferInterface>)nativeI420Buffer;
@end
NS_ASSUME_NONNULL_END

@@ -1,159 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCI420Buffer+Private.h"
#include "api/video/i420_buffer.h"
#if !defined(NDEBUG) && defined(WEBRTC_IOS)
#import <UIKit/UIKit.h>
#include "third_party/libyuv/include/libyuv.h"
#endif
@implementation RTCI420Buffer {
@protected
rtc::scoped_refptr<webrtc::I420BufferInterface> _i420Buffer;
}
- (instancetype)initWithWidth:(int)width height:(int)height {
if (self = [super init]) {
_i420Buffer = webrtc::I420Buffer::Create(width, height);
}
return self;
}
- (instancetype)initWithWidth:(int)width
height:(int)height
dataY:(const uint8_t *)dataY
dataU:(const uint8_t *)dataU
dataV:(const uint8_t *)dataV {
if (self = [super init]) {
_i420Buffer = webrtc::I420Buffer::Copy(
width, height, dataY, width, dataU, (width + 1) / 2, dataV, (width + 1) / 2);
}
return self;
}
- (instancetype)initWithWidth:(int)width
height:(int)height
strideY:(int)strideY
strideU:(int)strideU
strideV:(int)strideV {
if (self = [super init]) {
_i420Buffer = webrtc::I420Buffer::Create(width, height, strideY, strideU, strideV);
}
return self;
}
- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer {
if (self = [super init]) {
_i420Buffer = i420Buffer;
}
return self;
}
- (int)width {
return _i420Buffer->width();
}
- (int)height {
return _i420Buffer->height();
}
- (int)strideY {
return _i420Buffer->StrideY();
}
- (int)strideU {
return _i420Buffer->StrideU();
}
- (int)strideV {
return _i420Buffer->StrideV();
}
- (int)chromaWidth {
return _i420Buffer->ChromaWidth();
}
- (int)chromaHeight {
return _i420Buffer->ChromaHeight();
}
- (const uint8_t *)dataY {
return _i420Buffer->DataY();
}
- (const uint8_t *)dataU {
return _i420Buffer->DataU();
}
- (const uint8_t *)dataV {
return _i420Buffer->DataV();
}
- (id<RTCI420Buffer>)toI420 {
return self;
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::I420BufferInterface>)nativeI420Buffer {
return _i420Buffer;
}
#pragma mark - Debugging
#if !defined(NDEBUG) && defined(WEBRTC_IOS)
- (id)debugQuickLookObject {
UIGraphicsBeginImageContext(CGSizeMake(_i420Buffer->width(), _i420Buffer->height()));
CGContextRef c = UIGraphicsGetCurrentContext();
uint8_t *ctxData = (uint8_t *)CGBitmapContextGetData(c);
libyuv::I420ToARGB(_i420Buffer->DataY(),
_i420Buffer->StrideY(),
_i420Buffer->DataU(),
_i420Buffer->StrideU(),
_i420Buffer->DataV(),
_i420Buffer->StrideV(),
ctxData,
CGBitmapContextGetBytesPerRow(c),
CGBitmapContextGetWidth(c),
CGBitmapContextGetHeight(c));
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return image;
}
#endif
@end
#pragma mark -
@implementation RTCMutableI420Buffer
- (uint8_t *)mutableDataY {
return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataY();
}
- (uint8_t *)mutableDataU {
return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataU();
}
- (uint8_t *)mutableDataV {
return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataV();
}
@end
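
As a small illustration of the plane geometry these accessors expose, the
sketch below fills a mutable buffer with black; the chroma planes are
subsampled by two in each dimension, so the U and V spans use chromaHeight
rather than height (the 640x480 size is hypothetical):

RTCMutableI420Buffer *black =
    [[RTCMutableI420Buffer alloc] initWithWidth:640 height:480];
memset(black.mutableDataY, 0x10, black.strideY * black.height);        // Luma: black.
memset(black.mutableDataU, 0x80, black.strideU * black.chromaHeight);  // Neutral chroma.
memset(black.mutableDataV, 0x80, black.strideV * black.chromaHeight);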

@@ -1,25 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCOpenGLDefines.h"
#import "WebRTC/RTCVideoFrame.h"
@interface RTCI420TextureCache : NSObject
@property(nonatomic, readonly) GLuint yTexture;
@property(nonatomic, readonly) GLuint uTexture;
@property(nonatomic, readonly) GLuint vTexture;
- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
- (void)uploadFrameToTextures:(RTCVideoFrame *)frame;
@end

@@ -1,155 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCI420TextureCache.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#if TARGET_OS_IPHONE
#import <OpenGLES/ES3/gl.h>
#else
#import <OpenGL/gl3.h>
#endif
#include <vector>
// Two sets of 3 textures are used here, one for each of the Y, U and V planes. Having two sets
// alleviates CPU blockage in the event that the GPU is asked to render to a texture that is already
// in use.
static const GLsizei kNumTextureSets = 2;
static const GLsizei kNumTexturesPerSet = 3;
static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
@implementation RTCI420TextureCache {
BOOL _hasUnpackRowLength;
GLint _currentTextureSet;
// Handles for OpenGL constructs.
GLuint _textures[kNumTextures];
// Used to create a non-padded plane for GPU upload when we receive padded frames.
std::vector<uint8_t> _planeBuffer;
}
- (GLuint)yTexture {
return _textures[_currentTextureSet * kNumTexturesPerSet];
}
- (GLuint)uTexture {
return _textures[_currentTextureSet * kNumTexturesPerSet + 1];
}
- (GLuint)vTexture {
return _textures[_currentTextureSet * kNumTexturesPerSet + 2];
}
- (instancetype)initWithContext:(GlContextType *)context {
if (self = [super init]) {
#if TARGET_OS_IPHONE
_hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3);
#else
_hasUnpackRowLength = YES;
#endif
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
[self setupTextures];
}
return self;
}
- (void)dealloc {
glDeleteTextures(kNumTextures, _textures);
}
- (void)setupTextures {
glGenTextures(kNumTextures, _textures);
// Set parameters for each of the textures we created.
for (GLsizei i = 0; i < kNumTextures; i++) {
glBindTexture(GL_TEXTURE_2D, _textures[i]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
}
- (void)uploadPlane:(const uint8_t *)plane
texture:(GLuint)texture
width:(size_t)width
height:(size_t)height
stride:(int32_t)stride {
glBindTexture(GL_TEXTURE_2D, texture);
const uint8_t *uploadPlane = plane;
if ((size_t)stride != width) {
if (_hasUnpackRowLength) {
// GLES3 allows us to specify stride.
glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
glTexImage2D(GL_TEXTURE_2D,
0,
RTC_PIXEL_FORMAT,
static_cast<GLsizei>(width),
static_cast<GLsizei>(height),
0,
RTC_PIXEL_FORMAT,
GL_UNSIGNED_BYTE,
uploadPlane);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
return;
} else {
// Make an unpadded copy and upload that instead. Quick profiling showed
// that this is faster than uploading row by row using glTexSubImage2D.
uint8_t *unpaddedPlane = _planeBuffer.data();
for (size_t y = 0; y < height; ++y) {
memcpy(unpaddedPlane + y * width, plane + y * stride, width);
}
uploadPlane = unpaddedPlane;
}
}
glTexImage2D(GL_TEXTURE_2D,
0,
RTC_PIXEL_FORMAT,
static_cast<GLsizei>(width),
static_cast<GLsizei>(height),
0,
RTC_PIXEL_FORMAT,
GL_UNSIGNED_BYTE,
uploadPlane);
}
- (void)uploadFrameToTextures:(RTCVideoFrame *)frame {
_currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
id<RTCI420Buffer> buffer = [frame.buffer toI420];
const int chromaWidth = buffer.chromaWidth;
const int chromaHeight = buffer.chromaHeight;
if (buffer.strideY != frame.width || buffer.strideU != chromaWidth ||
buffer.strideV != chromaWidth) {
_planeBuffer.resize(buffer.width * buffer.height);
}
[self uploadPlane:buffer.dataY
texture:self.yTexture
width:buffer.width
height:buffer.height
stride:buffer.strideY];
[self uploadPlane:buffer.dataU
texture:self.uTexture
width:chromaWidth
height:chromaHeight
stride:buffer.strideU];
[self uploadPlane:buffer.dataV
texture:self.vTexture
width:chromaWidth
height:chromaHeight
stride:buffer.strideV];
}
@end
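
Combined with RTCDefaultShader above, a per-frame I420 render pass might look
roughly like this; the renderer, the current GL context, and the
i420Cache/shader/frame variables are assumed:

[i420Cache uploadFrameToTextures:frame];
[shader applyShadingForFrameWithWidth:frame.width
                               height:frame.height
                             rotation:frame.rotation
                               yPlane:i420Cache.yTexture
                               uPlane:i420Cache.uTexture
                               vPlane:i420Cache.vTexture];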

@@ -8,24 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#import <GLKit/GLKit.h>
@class RTCVideoFrame;
NS_ASSUME_NONNULL_BEGIN
@interface RTCNV12TextureCache : NSObject
@property(nonatomic, readonly) GLuint yTexture;
@property(nonatomic, readonly) GLuint uvTexture;
- (instancetype)init NS_UNAVAILABLE;
- (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER;
- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame;
- (void)releaseTextures;
@end
NS_ASSUME_NONNULL_END
#import "components/renderer/opengl/RTCNV12TextureCache.h"

@@ -1,111 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCNV12TextureCache.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
@implementation RTCNV12TextureCache {
CVOpenGLESTextureCacheRef _textureCache;
CVOpenGLESTextureRef _yTextureRef;
CVOpenGLESTextureRef _uvTextureRef;
}
- (GLuint)yTexture {
return CVOpenGLESTextureGetName(_yTextureRef);
}
- (GLuint)uvTexture {
return CVOpenGLESTextureGetName(_uvTextureRef);
}
- (instancetype)initWithContext:(EAGLContext *)context {
if (self = [super init]) {
CVReturn ret = CVOpenGLESTextureCacheCreate(
kCFAllocatorDefault, NULL,
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
context,
#else
(__bridge void *)context,
#endif
NULL, &_textureCache);
if (ret != kCVReturnSuccess) {
self = nil;
}
}
return self;
}
- (BOOL)loadTexture:(CVOpenGLESTextureRef *)textureOut
pixelBuffer:(CVPixelBufferRef)pixelBuffer
planeIndex:(int)planeIndex
pixelFormat:(GLenum)pixelFormat {
const int width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex);
const int height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex);
if (*textureOut) {
CFRelease(*textureOut);
*textureOut = nil;
}
CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(
kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, pixelFormat, width,
height, pixelFormat, GL_UNSIGNED_BYTE, planeIndex, textureOut);
if (ret != kCVReturnSuccess) {
CFRelease(*textureOut);
*textureOut = nil;
return NO;
}
NSAssert(CVOpenGLESTextureGetTarget(*textureOut) == GL_TEXTURE_2D,
@"Unexpected GLES texture target");
glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(*textureOut));
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
return YES;
}
- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame {
NSAssert([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]],
@"frame must be CVPixelBuffer backed");
RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
return [self loadTexture:&_yTextureRef
pixelBuffer:pixelBuffer
planeIndex:0
pixelFormat:GL_LUMINANCE] &&
[self loadTexture:&_uvTextureRef
pixelBuffer:pixelBuffer
planeIndex:1
pixelFormat:GL_LUMINANCE_ALPHA];
}
- (void)releaseTextures {
if (_uvTextureRef) {
CFRelease(_uvTextureRef);
_uvTextureRef = nil;
}
if (_yTextureRef) {
CFRelease(_yTextureRef);
_yTextureRef = nil;
}
}
- (void)dealloc {
[self releaseTextures];
if (_textureCache) {
CFRelease(_textureCache);
_textureCache = nil;
}
}
@end
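
The NV12 path is similar, with one wrinkle: the CVOpenGLESTexture references
pin the underlying buffers, so the renderer is expected to call
releaseTextures once the draw calls have been issued. A sketch under the same
assumptions as the I420 example above:

if ([nv12Cache uploadFrameToTextures:frame]) {
  [shader applyShadingForFrameWithWidth:frame.width
                                 height:frame.height
                               rotation:frame.rotation
                                 yPlane:nv12Cache.yTexture
                                uvPlane:nv12Cache.uvTexture];
  [nv12Cache releaseTextures];
}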

@@ -1,37 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#define RTC_PIXEL_FORMAT GL_LUMINANCE
#define SHADER_VERSION
#define VERTEX_SHADER_IN "attribute"
#define VERTEX_SHADER_OUT "varying"
#define FRAGMENT_SHADER_IN "varying"
#define FRAGMENT_SHADER_OUT
#define FRAGMENT_SHADER_COLOR "gl_FragColor"
#define FRAGMENT_SHADER_TEXTURE "texture2D"
@class EAGLContext;
typedef EAGLContext GlContextType;
#else
#define RTC_PIXEL_FORMAT GL_RED
#define SHADER_VERSION "#version 150\n"
#define VERTEX_SHADER_IN "in"
#define VERTEX_SHADER_OUT "out"
#define FRAGMENT_SHADER_IN "in"
#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
#define FRAGMENT_SHADER_COLOR "fragColor"
#define FRAGMENT_SHADER_TEXTURE "texture"
@class NSOpenGLContext;
typedef NSOpenGLContext GlContextType;
#endif

@@ -1,21 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrame.h"
RTC_EXTERN const char kRTCVertexShaderSource[];
RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar* source);
RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader);
RTC_EXTERN GLuint
RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]);
RTC_EXTERN BOOL RTCCreateVertexBuffer(GLuint* vertexBuffer,
GLuint* vertexArray);
RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation);

@@ -1,189 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCShader.h"
#if TARGET_OS_IPHONE
#import <OpenGLES/ES3/gl.h>
#else
#import <OpenGL/gl3.h>
#endif
#include <algorithm>
#include <array>
#include <memory>
#import "RTCOpenGLDefines.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
// Vertex shader doesn't do anything except pass coordinates through.
const char kRTCVertexShaderSource[] =
SHADER_VERSION
VERTEX_SHADER_IN " vec2 position;\n"
VERTEX_SHADER_IN " vec2 texcoord;\n"
VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
"void main() {\n"
" gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
" v_texcoord = texcoord;\n"
"}\n";
// Compiles a shader of the given |type| with GLSL source |source| and returns
// the shader handle or 0 on error.
GLuint RTCCreateShader(GLenum type, const GLchar *source) {
GLuint shader = glCreateShader(type);
if (!shader) {
return 0;
}
glShaderSource(shader, 1, &source, NULL);
glCompileShader(shader);
GLint compileStatus = GL_FALSE;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
if (compileStatus == GL_FALSE) {
GLint logLength = 0;
// The null termination character is included in the returned log length.
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
std::unique_ptr<char[]> compileLog(new char[logLength]);
// The returned string is null terminated.
glGetShaderInfoLog(shader, logLength, NULL, compileLog.get());
RTC_LOG(LS_ERROR) << "Shader compile error: " << compileLog.get();
}
glDeleteShader(shader);
shader = 0;
}
return shader;
}
// Links a shader program with the given vertex and fragment shaders and
// returns the program handle or 0 on error.
GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) {
if (vertexShader == 0 || fragmentShader == 0) {
return 0;
}
GLuint program = glCreateProgram();
if (!program) {
return 0;
}
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (linkStatus == GL_FALSE) {
glDeleteProgram(program);
program = 0;
}
return program;
}
// Creates and links a shader program with the given fragment shader source and
// a plain vertex shader. Returns the program handle or 0 on error.
GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) {
GLuint vertexShader = RTCCreateShader(GL_VERTEX_SHADER, kRTCVertexShaderSource);
RTC_CHECK(vertexShader) << "failed to create vertex shader";
GLuint fragmentShader =
RTCCreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource);
RTC_CHECK(fragmentShader) << "failed to create fragment shader";
GLuint program = RTCCreateProgram(vertexShader, fragmentShader);
// Shaders are created only to generate program.
if (vertexShader) {
glDeleteShader(vertexShader);
}
if (fragmentShader) {
glDeleteShader(fragmentShader);
}
// Set vertex shader variables 'position' and 'texcoord' in program.
GLint position = glGetAttribLocation(program, "position");
GLint texcoord = glGetAttribLocation(program, "texcoord");
if (position < 0 || texcoord < 0) {
glDeleteProgram(program);
return 0;
}
// Read position attribute with size of 2 and stride of 4 beginning at the start of the array. The
// last argument indicates offset of data within the vertex buffer.
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
glEnableVertexAttribArray(position);
// Read texcoord attribute with size of 2 and stride of 4 beginning at the first texcoord in the
// array. The last argument indicates offset of data within the vertex buffer.
glVertexAttribPointer(
texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)(2 * sizeof(GLfloat)));
glEnableVertexAttribArray(texcoord);
return program;
}
BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) {
#if !TARGET_OS_IPHONE
glGenVertexArrays(1, vertexArray);
if (*vertexArray == 0) {
return NO;
}
glBindVertexArray(*vertexArray);
#endif
glGenBuffers(1, vertexBuffer);
if (*vertexBuffer == 0) {
glDeleteVertexArrays(1, vertexArray);
return NO;
}
glBindBuffer(GL_ARRAY_BUFFER, *vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, 4 * 4 * sizeof(GLfloat), NULL, GL_DYNAMIC_DRAW);
return YES;
}
// Set vertex data to the currently bound vertex buffer.
void RTCSetVertexData(RTCVideoRotation rotation) {
// When modelview and projection matrices are identity (default) the world is
// contained in the square around origin with unit size 2. Drawing to these
// coordinates is equivalent to drawing to the entire screen. The texture is
// stretched over that square using texture coordinates (u, v) that range
// from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically
// here because the incoming frame has origin in upper left hand corner but
// OpenGL expects origin in bottom left corner.
std::array<std::array<GLfloat, 2>, 4> UVCoords = {{
{{0, 1}}, // Lower left.
{{1, 1}}, // Lower right.
{{1, 0}}, // Upper right.
{{0, 0}}, // Upper left.
}};
// Rotate the UV coordinates.
int rotation_offset;
switch (rotation) {
case RTCVideoRotation_0:
rotation_offset = 0;
break;
case RTCVideoRotation_90:
rotation_offset = 1;
break;
case RTCVideoRotation_180:
rotation_offset = 2;
break;
case RTCVideoRotation_270:
rotation_offset = 3;
break;
}
std::rotate(UVCoords.begin(), UVCoords.begin() + rotation_offset,
UVCoords.end());
const GLfloat gVertices[] = {
// X, Y, U, V.
-1, -1, UVCoords[0][0], UVCoords[0][1],
1, -1, UVCoords[1][0], UVCoords[1][1],
1, 1, UVCoords[2][0], UVCoords[2][1],
-1, 1, UVCoords[3][0], UVCoords[3][1],
};
glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(gVertices), gVertices);
}

@@ -1,19 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
#include "media/base/h264_profile_level_id.h"
@interface UIDevice (H264Profile)
+ (absl::optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile;
@end

@@ -1,108 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "UIDevice+H264Profile.h"
#import "WebRTC/UIDevice+RTCDevice.h"
#include <algorithm>
namespace {
using namespace webrtc::H264;
struct SupportedH264Profile {
const RTCDeviceType deviceType;
const ProfileLevelId profile;
};
constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
// iPhones with at least iOS 9
{RTCDeviceTypeIPhoneX, {kProfileHigh, kLevel5_2}}, // https://support.apple.com/kb/SP770
{RTCDeviceTypeIPhone8, {kProfileHigh, kLevel5_2}}, // https://support.apple.com/kb/SP767
{RTCDeviceTypeIPhone8Plus, {kProfileHigh, kLevel5_2}}, // https://support.apple.com/kb/SP768
{RTCDeviceTypeIPhone7, {kProfileHigh, kLevel5_1}}, // https://support.apple.com/kb/SP743
{RTCDeviceTypeIPhone7Plus, {kProfileHigh, kLevel5_1}}, // https://support.apple.com/kb/SP744
{RTCDeviceTypeIPhoneSE, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP738
{RTCDeviceTypeIPhone6S, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP726
{RTCDeviceTypeIPhone6SPlus, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP727
{RTCDeviceTypeIPhone6, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP705
{RTCDeviceTypeIPhone6Plus, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP706
{RTCDeviceTypeIPhone5SGSM, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP685
{RTCDeviceTypeIPhone5SGSM_CDMA,
{kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP685
{RTCDeviceTypeIPhone5GSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP655
{RTCDeviceTypeIPhone5GSM_CDMA,
{kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP655
{RTCDeviceTypeIPhone5CGSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP684
{RTCDeviceTypeIPhone5CGSM_CDMA,
{kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP684
{RTCDeviceTypeIPhone4S, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP643
// iPods with at least iOS 9
{RTCDeviceTypeIPodTouch6G, {kProfileMain, kLevel4_1}}, // https://support.apple.com/kb/SP720
{RTCDeviceTypeIPodTouch5G, {kProfileMain, kLevel3_1}}, // https://support.apple.com/kb/SP657
// iPads with at least iOS 9
{RTCDeviceTypeIPad2Wifi, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP622
{RTCDeviceTypeIPad2GSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP622
{RTCDeviceTypeIPad2CDMA, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP622
{RTCDeviceTypeIPad2Wifi2, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP622
{RTCDeviceTypeIPadMiniWifi, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP661
{RTCDeviceTypeIPadMiniGSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP661
{RTCDeviceTypeIPadMiniGSM_CDMA,
{kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP661
{RTCDeviceTypeIPad3Wifi, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP647
{RTCDeviceTypeIPad3GSM_CDMA, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP647
{RTCDeviceTypeIPad3GSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP647
{RTCDeviceTypeIPad4Wifi, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP662
{RTCDeviceTypeIPad4GSM, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP662
{RTCDeviceTypeIPad4GSM_CDMA, {kProfileHigh, kLevel4_1}}, // https://support.apple.com/kb/SP662
{RTCDeviceTypeIPad5, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP751
{RTCDeviceTypeIPad6, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP774
{RTCDeviceTypeIPadAirWifi, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP692
{RTCDeviceTypeIPadAirCellular,
{kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP692
{RTCDeviceTypeIPadAirWifiCellular,
{kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP692
{RTCDeviceTypeIPadAir2, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP708
{RTCDeviceTypeIPadMini2GWifi, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP693
{RTCDeviceTypeIPadMini2GCellular,
{kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP693
{RTCDeviceTypeIPadMini2GWifiCellular,
{kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP693
{RTCDeviceTypeIPadMini3, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP709
{RTCDeviceTypeIPadMini4, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP725
{RTCDeviceTypeIPadPro9Inch, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP739
{RTCDeviceTypeIPadPro12Inch, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/sp723
{RTCDeviceTypeIPadPro12Inch2, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP761
{RTCDeviceTypeIPadPro10Inch, {kProfileHigh, kLevel4_2}}, // https://support.apple.com/kb/SP762
};
absl::optional<ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
const auto* result = std::find_if(std::begin(kH264MaxSupportedProfiles),
std::end(kH264MaxSupportedProfiles),
[deviceType](const SupportedH264Profile& supportedProfile) {
return supportedProfile.deviceType == deviceType;
});
if (result != std::end(kH264MaxSupportedProfiles)) {
return result->profile;
}
return absl::nullopt;
}
} // namespace
@implementation UIDevice (H264Profile)
+ (absl::optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile {
return FindMaxSupportedProfileForDevice([self deviceType]);
}
@end
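
A sketch of how a video encoder factory might consult this category when
deciding which H264 profile to advertise; the surrounding factory code is
assumed:

absl::optional<webrtc::H264::ProfileLevelId> deviceProfile =
    [UIDevice maxSupportedH264Profile];
if (deviceProfile && deviceProfile->profile == webrtc::H264::kProfileHigh) {
  // Offer High profile in the supported-codec list; otherwise fall back
  // to a lower profile.
}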