iOS render: Handle frame rotation in OpenGL

This CL handles frame rotation by updating the OpenGL vertex data in
RTCOpenGLVideoRenderer, instead of calling the expensive
libyuv::I420Rotate that will rotate the actual memory. Also, we can
handle rotated native frames instead of falling back to
NativeToI420Buffer.

Review-Url: https://codereview.webrtc.org/2176623002
Cr-Commit-Position: refs/heads/master@{#13715}
This commit is contained in:
magjed
2016-08-10 07:58:29 -07:00
committed by Commit bot
parent 4556b45076
commit fb372f0074
10 changed files with 119 additions and 64 deletions

View File

@ -13,7 +13,7 @@
#import <GLKit/GLKit.h>
#import "RTCOpenGLVideoRenderer.h"
#import "WebRTC//RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrame.h"
// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
// refreshes, which should be 30fps. We wrap the display link in order to avoid

View File

@ -15,6 +15,9 @@
#import "RTCShader+Private.h"
#import "WebRTC/RTCVideoFrame.h"
#include "webrtc/base/optional.h"
#include "webrtc/common_video/rotation.h"
// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
// of 3 textures are used here, one for each of the Y, U and V planes. Having
// two sets alleviates CPU blockage in the event that the GPU is asked to render
@ -57,6 +60,9 @@ static const char kI420FragmentShaderSource[] =
GLint _ySampler;
GLint _uSampler;
GLint _vSampler;
// Store current rotation and only upload new vertex data when rotation
// changes.
rtc::Optional<webrtc::VideoRotation> _currentRotation;
// Used to create a non-padded plane for GPU upload when we receive padded
// frames.
std::vector<uint8_t> _planeBuffer;
@ -119,6 +125,11 @@ static const char kI420FragmentShaderSource[] =
glBindVertexArray(_vertexArray);
#endif
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
if (!_currentRotation || frame.rotation != *_currentRotation) {
_currentRotation = rtc::Optional<webrtc::VideoRotation>(
static_cast<webrtc::VideoRotation>(frame.rotation));
RTCSetVertexData(*_currentRotation);
}
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
return YES;

View File

@ -20,6 +20,8 @@
#import "WebRTC/RTCVideoFrame.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/optional.h"
#include "webrtc/common_video/rotation.h"
static const char kNV12FragmentShaderSource[] =
SHADER_VERSION
@ -46,6 +48,9 @@ static const char kNV12FragmentShaderSource[] =
GLint _ySampler;
GLint _uvSampler;
CVOpenGLESTextureCacheRef _textureCache;
// Store current rotation and only upload new vertex data when rotation
// changes.
rtc::Optional<webrtc::VideoRotation> _currentRotation;
}
- (instancetype)initWithContext:(GlContextType *)context {
@ -149,6 +154,11 @@ static const char kNV12FragmentShaderSource[] =
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
if (!_currentRotation || frame.rotation != *_currentRotation) {
_currentRotation = rtc::Optional<webrtc::VideoRotation>(
static_cast<webrtc::VideoRotation>(frame.rotation));
RTCSetVertexData(*_currentRotation);
}
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
CFRelease(chromaTexture);

View File

@ -18,6 +18,8 @@
#import <OpenGL/gl3.h>
#endif
#include "webrtc/common_video/rotation.h"
RTC_EXTERN const char kRTCVertexShaderSource[];
RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar *source);
@ -25,3 +27,4 @@ RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader);
RTC_EXTERN GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]);
RTC_EXTERN BOOL RTCSetupVerticesForProgram(
GLuint program, GLuint* vertexBuffer, GLuint* vertexArray);
RTC_EXTERN void RTCSetVertexData(webrtc::VideoRotation rotation);

View File

@ -10,6 +10,8 @@
#import "RTCShader.h"
#include <algorithm>
#include <array>
#include <memory>
#import "RTCShader+Private.h"
@ -28,21 +30,6 @@ const char kRTCVertexShaderSource[] =
" v_texcoord = texcoord;\n"
"}\n";
// When modelview and projection matrices are identity (default) the world is
// contained in the square around origin with unit size 2. Drawing to these
// coordinates is equivalent to drawing to the entire screen. The texture is
// stretched over that square using texture coordinates (u, v) that range
// from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically
// here because the incoming frame has origin in upper left hand corner but
// OpenGL expects origin in bottom left corner.
static const GLfloat gVertices[] = {
// X, Y, U, V.
-1, -1, 0, 1, // Bottom left.
1, -1, 1, 1, // Bottom right.
1, 1, 1, 0, // Top right.
-1, 1, 0, 0, // Top left.
};
// Compiles a shader of the given |type| with GLSL source |source| and returns
// the shader handle or 0 on error.
GLuint RTCCreateShader(GLenum type, const GLchar *source) {
@ -111,9 +98,8 @@ GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) {
return program;
}
// Set vertex shader variables 'position' and 'texcoord' in |program| to the
// |gVertices| array above. It will use |vertexBuffer| and |vertexArray| to
// store the vertex data.
// Set vertex shader variables 'position' and 'texcoord' in |program| to use
// |vertexBuffer| and |vertexArray| to store the vertex data.
BOOL RTCSetupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* vertexArray) {
GLint position = glGetAttribLocation(program, "position");
GLint texcoord = glGetAttribLocation(program, "texcoord");
@ -132,11 +118,11 @@ BOOL RTCSetupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* ve
return NO;
}
glBindBuffer(GL_ARRAY_BUFFER, *vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW);
glBufferData(GL_ARRAY_BUFFER, 4 * 4 * sizeof(GLfloat), NULL, GL_DYNAMIC_DRAW);
// Read position attribute from |gVertices| with size of 2 and stride of 4
// beginning at the start of the array. The last argument indicates offset
// of data within |gVertices| as supplied to the vertex buffer.
// Read position attribute with size of 2 and stride of 4 beginning at the
// start of the array. The last argument indicates offset of data within the
// vertex buffer.
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat),
(void *)0);
glEnableVertexAttribArray(position);
@ -150,3 +136,49 @@ BOOL RTCSetupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* ve
return YES;
}
// Uploads vertex data (X, Y positions plus U, V texture coordinates) for
// |rotation| into the currently bound GL_ARRAY_BUFFER. The buffer must
// already have been allocated with 4 * 4 * sizeof(GLfloat) bytes of
// GL_DYNAMIC_DRAW storage (see RTCSetupVerticesForProgram).
void RTCSetVertexData(webrtc::VideoRotation rotation) {
  // When modelview and projection matrices are identity (default) the world is
  // contained in the square around origin with unit size 2. Drawing to these
  // coordinates is equivalent to drawing to the entire screen. The texture is
  // stretched over that square using texture coordinates (u, v) that range
  // from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically
  // here because the incoming frame has origin in upper left hand corner but
  // OpenGL expects origin in bottom left corner.
  std::array<std::array<GLfloat, 2>, 4> UVCoords = {{
      {{0, 1}},  // Lower left.
      {{1, 1}},  // Lower right.
      {{1, 0}},  // Upper right.
      {{0, 0}},  // Upper left.
  }};

  // Rotate the UV coordinates by cycling which corner the texture starts at.
  // Zero-initialize so that an out-of-range enum value (callers construct
  // |rotation| via static_cast from an int) degrades to "no rotation" instead
  // of passing an indeterminate offset to std::rotate.
  int rotation_offset = 0;
  switch (rotation) {
    case webrtc::kVideoRotation_0:
      rotation_offset = 0;
      break;
    case webrtc::kVideoRotation_90:
      rotation_offset = 1;
      break;
    case webrtc::kVideoRotation_180:
      rotation_offset = 2;
      break;
    case webrtc::kVideoRotation_270:
      rotation_offset = 3;
      break;
  }
  std::rotate(UVCoords.begin(), UVCoords.begin() + rotation_offset,
              UVCoords.end());

  const GLfloat gVertices[] = {
      // X, Y, U, V.
      -1, -1, UVCoords[0][0], UVCoords[0][1],  // Bottom left.
       1, -1, UVCoords[1][0], UVCoords[1][1],  // Bottom right.
       1,  1, UVCoords[2][0], UVCoords[2][1],  // Top right.
      -1,  1, UVCoords[3][0], UVCoords[3][1],  // Top left.
  };

  glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(gVertices), gVertices);
}

View File

@ -10,6 +10,7 @@
#import "WebRTC/RTCVideoFrame.h"
#include "webrtc/common_video/rotation.h"
#include "webrtc/media/base/videoframe.h"
NS_ASSUME_NONNULL_BEGIN
@ -19,7 +20,10 @@ NS_ASSUME_NONNULL_BEGIN
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer;
- (instancetype)initWithNativeFrame:(const cricket::VideoFrame *)nativeFrame
- (instancetype)initWithVideoBuffer:
(rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
rotation:(webrtc::VideoRotation)rotation
timeStampNs:(int64_t)timeStampNs
NS_DESIGNATED_INITIALIZER;
@end

View File

@ -12,17 +12,25 @@
#include <memory>
#include "webrtc/common_video/rotation.h"
@implementation RTCVideoFrame {
std::unique_ptr<cricket::VideoFrame> _videoFrame;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;
webrtc::VideoRotation _rotation;
int64_t _timeStampNs;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> _i420Buffer;
}
- (size_t)width {
return _videoFrame->width();
return _videoBuffer->width();
}
- (size_t)height {
return _videoFrame->height();
return _videoBuffer->height();
}
// Frame rotation as a plain int. Mirrors the stored webrtc::VideoRotation
// value, whose enumerators (kVideoRotation_0/90/180/270) encode degrees.
- (int)rotation {
  const webrtc::VideoRotation storedRotation = _rotation;
  return static_cast<int>(storedRotation);
}
// TODO(nisse): chromaWidth and chromaHeight are used only in
@ -78,34 +86,32 @@
return self.i420Buffer->StrideV();
}
- (int64_t)timeStamp {
return _videoFrame->GetTimeStamp();
// Capture timestamp in nanoseconds, exactly as supplied to the designated
// initializer; no conversion is performed here.
- (int64_t)timeStampNs {
  const int64_t timeStampNs = _timeStampNs;
  return timeStampNs;
}
- (CVPixelBufferRef)nativeHandle {
return static_cast<CVPixelBufferRef>(
_videoFrame->video_frame_buffer()->native_handle());
return static_cast<CVPixelBufferRef>(_videoBuffer->native_handle());
}
- (void)convertBufferIfNeeded {
if (!_i420Buffer) {
if (_videoFrame->video_frame_buffer()->native_handle()) {
// Convert to I420.
_i420Buffer = _videoFrame->video_frame_buffer()->NativeToI420Buffer();
} else {
// Should already be I420.
_i420Buffer = _videoFrame->video_frame_buffer();
}
_i420Buffer = _videoBuffer->native_handle()
? _videoBuffer->NativeToI420Buffer()
: _videoBuffer;
}
}
#pragma mark - Private
- (instancetype)initWithNativeFrame:(const cricket::VideoFrame *)nativeFrame {
- (instancetype)initWithVideoBuffer:
(rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
rotation:(webrtc::VideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
if (self = [super init]) {
// Keep a shallow copy of the video frame. The underlying frame buffer is
// not copied.
_videoFrame.reset(nativeFrame->Copy());
_videoBuffer = videoBuffer;
_rotation = rotation;
_timeStampNs = timeStampNs;
}
return self;
}

View File

@ -27,27 +27,14 @@ class VideoRendererAdapter
}
void OnFrame(const cricket::VideoFrame& nativeVideoFrame) override {
RTCVideoFrame *videoFrame = nil;
// Rotation of native handles is unsupported right now. Convert to CPU
// I420 buffer for rotation before calling the rotation method otherwise
// it will hit a DCHECK.
if (nativeVideoFrame.rotation() != webrtc::kVideoRotation_0 &&
nativeVideoFrame.video_frame_buffer()->native_handle()) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer =
nativeVideoFrame.video_frame_buffer()->NativeToI420Buffer();
std::unique_ptr<cricket::VideoFrame> cpuFrame(
new cricket::WebRtcVideoFrame(i420Buffer, nativeVideoFrame.rotation(),
nativeVideoFrame.timestamp_us(),
nativeVideoFrame.transport_frame_id()));
const cricket::VideoFrame *rotatedFrame =
cpuFrame->GetCopyWithRotationApplied();
videoFrame = [[RTCVideoFrame alloc] initWithNativeFrame:rotatedFrame];
} else {
const cricket::VideoFrame *rotatedFrame =
nativeVideoFrame.GetCopyWithRotationApplied();
videoFrame = [[RTCVideoFrame alloc] initWithNativeFrame:rotatedFrame];
}
CGSize current_size = CGSizeMake(videoFrame.width, videoFrame.height);
RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
initWithVideoBuffer:nativeVideoFrame.video_frame_buffer()
rotation:nativeVideoFrame.rotation()
timeStampNs:nativeVideoFrame.GetTimeStamp()];
CGSize current_size = (videoFrame.rotation % 180 == 0)
? CGSizeMake(videoFrame.width, videoFrame.height)
: CGSizeMake(videoFrame.height, videoFrame.width);
if (!CGSizeEqualToSize(size_, current_size)) {
size_ = current_size;
[adapter_.videoRenderer setSize:size_];