Clean up RTCVideoFrame

RTCVideoFrame is an Objective-C version of webrtc::VideoFrame, but it
currently contains extra logic beyond that. We want RTCVideoFrame
to be as simple as possible, i.e. just a container with no extra state,
so we can use it as input to RTCVideoSource without complicating the
interface for consumers.

BUG=webrtc:7177
NOTRY=True

Review-Url: https://codereview.webrtc.org/2695203004
Cr-Commit-Position: refs/heads/master@{#16740}
Author:    magjed
Date:      2017-02-21 04:19:46 -08:00
Committer: Commit bot
Commit:    7ee512581c
Parent:    a518a39963

10 changed files with 84 additions and 122 deletions
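Editor's note: to make the consumer-facing change concrete, here is a minimal before/after sketch, assuming a frame delivered to a renderer callback; the variable names are illustrative, while the methods and properties come from the diffs below.

    // Before this CL: reading a YUV plane could lazily convert and cache an
    // I420 buffer inside the frame, i.e. hidden mutable state.
    const uint8_t *yBefore = frame.yPlane;  // may call NativeToI420Buffer()

    // After this CL: the frame is a plain container; conversion is explicit
    // and produces a new frame, leaving the original untouched.
    RTCVideoFrame *i420Frame = [frame newI420VideoFrame];
    const uint8_t *yAfter = i420Frame.dataY;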

View File

@@ -215,12 +215,6 @@
 }
 - (void)renderFrame:(RTCVideoFrame *)frame {
-#if !TARGET_OS_IPHONE
-  // Generate the i420 frame on video send thread instead of main thread.
-  // TODO(tkchin): Remove this once RTCEAGLVideoView supports uploading
-  // CVPixelBuffer textures on OSX.
-  [frame convertBufferIfNeeded];
-#endif
   self.videoFrame = frame;
 }

View File

@@ -15,7 +15,6 @@
 #import "RTCShader+Private.h"
 #import "WebRTC/RTCVideoFrame.h"
-#include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/optional.h"
 // |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
@@ -62,7 +61,7 @@ static const char kI420FragmentShaderSource[] =
   GLint _vSampler;
   // Store current rotation and only upload new vertex data when rotation
   // changes.
-  rtc::Optional<webrtc::VideoRotation> _currentRotation;
+  rtc::Optional<RTCVideoRotation> _currentRotation;
   // Used to create a non-padded plane for GPU upload when we receive padded
   // frames.
   std::vector<uint8_t> _planeBuffer;
@@ -126,8 +125,7 @@ static const char kI420FragmentShaderSource[] =
 #endif
   glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
   if (!_currentRotation || frame.rotation != *_currentRotation) {
-    _currentRotation = rtc::Optional<webrtc::VideoRotation>(
-        static_cast<webrtc::VideoRotation>(frame.rotation));
+    _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation);
     RTCSetVertexData(*_currentRotation);
   }
   glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
@@ -188,32 +186,34 @@
   GLint textureOffset = _currentTextureSet * 3;
   NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");
-  if (frame.yPitch != static_cast<int32_t>(frame.width) ||
-      frame.uPitch != static_cast<int32_t>(frame.chromaWidth) ||
-      frame.vPitch != static_cast<int32_t>(frame.chromaWidth)) {
+  const int chromaWidth = (frame.width + 1) / 2;
+  const int chromaHeight = (frame.height + 1) / 2;
+  if (frame.strideY != frame.width ||
+      frame.strideU != chromaWidth ||
+      frame.strideV != chromaWidth) {
     _planeBuffer.resize(frame.width * frame.height);
   }
-  [self uploadPlane:frame.yPlane
+  [self uploadPlane:frame.dataY
             sampler:_ySampler
              offset:textureOffset
               width:frame.width
              height:frame.height
-             stride:frame.yPitch];
-  [self uploadPlane:frame.uPlane
+             stride:frame.strideY];
+  [self uploadPlane:frame.dataU
             sampler:_uSampler
              offset:textureOffset + 1
-              width:frame.chromaWidth
-             height:frame.chromaHeight
-             stride:frame.uPitch];
-  [self uploadPlane:frame.vPlane
+              width:chromaWidth
+             height:chromaHeight
+             stride:frame.strideU];
+  [self uploadPlane:frame.dataV
             sampler:_vSampler
              offset:textureOffset + 2
-              width:frame.chromaWidth
-             height:frame.chromaHeight
-             stride:frame.vPitch];
+              width:chromaWidth
+             height:chromaHeight
+             stride:frame.strideV];
   _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
   return YES;

View File

@@ -19,7 +19,6 @@
 #import "RTCShader+Private.h"
 #import "WebRTC/RTCVideoFrame.h"
-#include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/optional.h"
@@ -50,7 +49,7 @@ static const char kNV12FragmentShaderSource[] =
   CVOpenGLESTextureCacheRef _textureCache;
   // Store current rotation and only upload new vertex data when rotation
   // changes.
-  rtc::Optional<webrtc::VideoRotation> _currentRotation;
+  rtc::Optional<RTCVideoRotation> _currentRotation;
 }
 - (instancetype)initWithContext:(GlContextType *)context {
@@ -155,8 +154,7 @@ static const char kNV12FragmentShaderSource[] =
   glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
   if (!_currentRotation || frame.rotation != *_currentRotation) {
-    _currentRotation = rtc::Optional<webrtc::VideoRotation>(
-        static_cast<webrtc::VideoRotation>(frame.rotation));
+    _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation);
     RTCSetVertexData(*_currentRotation);
   }
   glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

View File

@@ -48,16 +48,20 @@
       _nv12Shader = [[RTCNativeNV12Shader alloc] initWithContext:_context];
     }
     shader = _nv12Shader;
-#else
-  // Rendering native CVPixelBuffer is not supported on OS X.
-  if (false) {
-#endif
   } else {
     if (!_i420Shader) {
       _i420Shader = [[RTCI420Shader alloc] initWithContext:_context];
     }
     shader = _i420Shader;
   }
+#else
+  // Rendering native CVPixelBuffer is not supported on OS X.
+  frame = [frame newI420VideoFrame];
+  if (!_i420Shader) {
+    _i420Shader = [[RTCI420Shader alloc] initWithContext:_context];
+  }
+  shader = _i420Shader;
+#endif
   if (!shader || ![shader drawFrame:frame]) {
     return NO;
   }
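For orientation, the resulting shader selection reads roughly as follows; a simplified sketch, where the guarding condition on the frame's native handle is paraphrased rather than copied from the file:

    #if TARGET_OS_IPHONE
      if (frame.nativeHandle) {
        shader = _nv12Shader;  // render the CVPixelBuffer directly
      } else {
        shader = _i420Shader;
      }
    #else
      // OS X: native CVPixelBuffer rendering is unsupported, so convert the
      // frame to I420 up front instead of mutating hidden state on it.
      frame = [frame newI420VideoFrame];
      shader = _i420Shader;
    #endif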

View File

@@ -11,6 +11,7 @@
 #import "RTCShader.h"
 #import "WebRTC/RTCMacros.h"
+#import "WebRTC/RTCVideoFrame.h"
 #if TARGET_OS_IPHONE
 #import <OpenGLES/ES3/gl.h>
@@ -27,4 +28,4 @@ RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader);
 RTC_EXTERN GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]);
 RTC_EXTERN BOOL RTCSetupVerticesForProgram(
     GLuint program, GLuint* vertexBuffer, GLuint* vertexArray);
-RTC_EXTERN void RTCSetVertexData(webrtc::VideoRotation rotation);
+RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation);

View File

@@ -138,7 +138,7 @@ BOOL RTCSetupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* ve
 }
 // Set vertex data to the currently bound vertex buffer.
-void RTCSetVertexData(webrtc::VideoRotation rotation) {
+void RTCSetVertexData(RTCVideoRotation rotation) {
   // When modelview and projection matrices are identity (default) the world is
   // contained in the square around origin with unit size 2. Drawing to these
   // coordinates is equivalent to drawing to the entire screen. The texture is
@@ -156,16 +156,16 @@ void RTCSetVertexData(webrtc::VideoRotation rotation) {
   // Rotate the UV coordinates.
   int rotation_offset;
   switch (rotation) {
-    case webrtc::kVideoRotation_0:
+    case RTCVideoRotation_0:
       rotation_offset = 0;
       break;
-    case webrtc::kVideoRotation_90:
+    case RTCVideoRotation_90:
       rotation_offset = 1;
       break;
-    case webrtc::kVideoRotation_180:
+    case RTCVideoRotation_180:
       rotation_offset = 2;
       break;
-    case webrtc::kVideoRotation_270:
+    case RTCVideoRotation_270:
       rotation_offset = 3;
       break;
   }

View File

@@ -11,18 +11,14 @@
 #import "WebRTC/RTCVideoFrame.h"
 #include "webrtc/api/video/video_frame_buffer.h"
-#include "webrtc/api/video/video_rotation.h"
 NS_ASSUME_NONNULL_BEGIN
 @interface RTCVideoFrame ()
-@property(nonatomic, readonly)
-    rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer;
 - (instancetype)initWithVideoBuffer:
         (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
-               rotation:(webrtc::VideoRotation)rotation
+               rotation:(RTCVideoRotation)rotation
             timeStampNs:(int64_t)timeStampNs
     NS_DESIGNATED_INITIALIZER;

View File

@@ -10,80 +10,46 @@
 #import "RTCVideoFrame+Private.h"
-#include <memory>
-#include "webrtc/api/video/video_rotation.h"
 @implementation RTCVideoFrame {
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;
-  webrtc::VideoRotation _rotation;
+  RTCVideoRotation _rotation;
   int64_t _timeStampNs;
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> _i420Buffer;
 }
-- (size_t)width {
+- (int)width {
   return _videoBuffer->width();
 }
-- (size_t)height {
+- (int)height {
   return _videoBuffer->height();
 }
-- (int)rotation {
-  return static_cast<int>(_rotation);
+- (RTCVideoRotation)rotation {
+  return _rotation;
 }
-// TODO(nisse): chromaWidth and chromaHeight are used only in
-// RTCOpenGLVideoRenderer.mm. Update, and then delete these
-// properties.
-- (size_t)chromaWidth {
-  return (self.width + 1) / 2;
+- (const uint8_t *)dataY {
+  return _videoBuffer->DataY();
 }
-- (size_t)chromaHeight {
-  return (self.height + 1) / 2;
+- (const uint8_t *)dataU {
+  return _videoBuffer->DataU();
 }
-- (const uint8_t *)yPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataY();
+- (const uint8_t *)dataV {
+  return _videoBuffer->DataV();
 }
-- (const uint8_t *)uPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataU();
+- (int)strideY {
+  return _videoBuffer->StrideY();
 }
-- (const uint8_t *)vPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataV();
+- (int)strideU {
+  return _videoBuffer->StrideU();
 }
-- (int32_t)yPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideY();
-}
-- (int32_t)uPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideU();
-}
-- (int32_t)vPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideV();
+- (int)strideV {
+  return _videoBuffer->StrideV();
 }
 - (int64_t)timeStampNs {
@@ -94,19 +60,18 @@
   return static_cast<CVPixelBufferRef>(_videoBuffer->native_handle());
 }
-- (void)convertBufferIfNeeded {
-  if (!_i420Buffer) {
-    _i420Buffer = _videoBuffer->native_handle()
-                      ? _videoBuffer->NativeToI420Buffer()
-                      : _videoBuffer;
-  }
+- (RTCVideoFrame *)newI420VideoFrame {
+  return [[RTCVideoFrame alloc]
+      initWithVideoBuffer:_videoBuffer->NativeToI420Buffer()
+                 rotation:_rotation
+              timeStampNs:_timeStampNs];
 }
 #pragma mark - Private
 - (instancetype)initWithVideoBuffer:
         (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
-               rotation:(webrtc::VideoRotation)rotation
+               rotation:(RTCVideoRotation)rotation
             timeStampNs:(int64_t)timeStampNs {
   if (self = [super init]) {
     _videoBuffer = videoBuffer;
@@ -116,9 +81,4 @@
   return self;
 }
-- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)i420Buffer {
-  [self convertBufferIfNeeded];
-  return _i420Buffer;
-}
 @end

View File

@@ -27,9 +27,10 @@ class VideoRendererAdapter
   void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
     RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
         initWithVideoBuffer:nativeVideoFrame.video_frame_buffer()
-                   rotation:nativeVideoFrame.rotation()
+                   rotation:static_cast<RTCVideoRotation>(
+                                nativeVideoFrame.rotation())
                 timeStampNs:nativeVideoFrame.timestamp_us() *
                             rtc::kNumNanosecsPerMicrosec];
     CGSize current_size = (videoFrame.rotation % 180 == 0)
                               ? CGSizeMake(videoFrame.width, videoFrame.height)
                               : CGSizeMake(videoFrame.height, videoFrame.width);
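Two small things happen in this adapter: the native rotation enum is cast to RTCVideoRotation, which is safe because both enums use the numeric values 0/90/180/270, and the timestamp is converted from microseconds to nanoseconds:

    // rtc::kNumNanosecsPerMicrosec is 1000, so this scales us -> ns.
    int64_t timeStampNs =
        nativeVideoFrame.timestamp_us() * rtc::kNumNanosecsPerMicrosec;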

View File

@@ -15,25 +15,33 @@
 NS_ASSUME_NONNULL_BEGIN
+typedef NS_ENUM(NSInteger, RTCVideoRotation) {
+  RTCVideoRotation_0 = 0,
+  RTCVideoRotation_90 = 90,
+  RTCVideoRotation_180 = 180,
+  RTCVideoRotation_270 = 270,
+};
 // RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame.
 RTC_EXPORT
 @interface RTCVideoFrame : NSObject
 /** Width without rotation applied. */
-@property(nonatomic, readonly) size_t width;
+@property(nonatomic, readonly) int width;
 /** Height without rotation applied. */
-@property(nonatomic, readonly) size_t height;
-@property(nonatomic, readonly) int rotation;
-@property(nonatomic, readonly) size_t chromaWidth;
-@property(nonatomic, readonly) size_t chromaHeight;
-// These can return NULL if the object is not backed by a buffer.
-@property(nonatomic, readonly, nullable) const uint8_t *yPlane;
-@property(nonatomic, readonly, nullable) const uint8_t *uPlane;
-@property(nonatomic, readonly, nullable) const uint8_t *vPlane;
-@property(nonatomic, readonly) int32_t yPitch;
-@property(nonatomic, readonly) int32_t uPitch;
-@property(nonatomic, readonly) int32_t vPitch;
+@property(nonatomic, readonly) int height;
+@property(nonatomic, readonly) RTCVideoRotation rotation;
+/** Accessing YUV data should only be done for I420 frames, i.e. if nativeHandle
+ * is null. It is always possible to get such a frame by calling
+ * newI420VideoFrame.
+ */
+@property(nonatomic, readonly, nullable) const uint8_t *dataY;
+@property(nonatomic, readonly, nullable) const uint8_t *dataU;
+@property(nonatomic, readonly, nullable) const uint8_t *dataV;
+@property(nonatomic, readonly) int strideY;
+@property(nonatomic, readonly) int strideU;
+@property(nonatomic, readonly) int strideV;
 /** Timestamp in nanoseconds. */
 @property(nonatomic, readonly) int64_t timeStampNs;
@@ -43,10 +51,10 @@ RTC_EXPORT
 - (instancetype)init NS_UNAVAILABLE;
-/** If the frame is backed by a CVPixelBuffer, creates a backing i420 frame.
- * Calling the yuv plane properties will call this method if needed.
+/** Return a frame that is guaranteed to be I420, i.e. it is possible to access
+ * the YUV data on it.
  */
-- (void)convertBufferIfNeeded;
+- (RTCVideoFrame *)newI420VideoFrame;
 @end
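Taken together, the new header suggests the following consumption pattern; a hedged sketch, where processFrame: and the row loop are illustrative names rather than anything from this CL, while nativeHandle, newI420VideoFrame, and the data/stride accessors come from the header above:

    - (void)processFrame:(RTCVideoFrame *)frame {
      if (frame.nativeHandle) {
        // CVPixelBuffer-backed frame: make an explicit I420 copy first.
        frame = [frame newI420VideoFrame];
      }
      const uint8_t *dataY = frame.dataY;
      const int strideY = frame.strideY;
      for (int row = 0; row < frame.height; ++row) {
        // Respect the stride: rows may be padded beyond frame.width bytes.
        const uint8_t *rowStart = dataY + row * strideY;
        (void)rowStart;  // ... consume one row of luma here ...
      }
    }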