Clean up RTCVideoFrame

RTCVideoFrame is an Objective-C version of webrtc::VideoFrame, but it
currently contains extra logic beyond that, such as lazily converting
and caching an I420 buffer. We want RTCVideoFrame to be as simple as
possible, i.e. just a container with no extra state, so we can use it
as input to RTCVideoSource without complicating the interface for
consumers.

BUG=webrtc:7177
NOTRY=True

Review-Url: https://codereview.webrtc.org/2695203004
Cr-Commit-Position: refs/heads/master@{#16740}
magjed committed 2017-02-21 04:19:46 -08:00 (committed by Commit bot)
parent a518a39963 · commit 7ee512581c
10 changed files with 84 additions and 122 deletions

View File

@@ -215,12 +215,6 @@
 }

 - (void)renderFrame:(RTCVideoFrame *)frame {
-#if !TARGET_OS_IPHONE
-  // Generate the i420 frame on video send thread instead of main thread.
-  // TODO(tkchin): Remove this once RTCEAGLVideoView supports uploading
-  // CVPixelBuffer textures on OSX.
-  [frame convertBufferIfNeeded];
-#endif
   self.videoFrame = frame;
 }

View File

@@ -15,7 +15,6 @@
 #import "RTCShader+Private.h"
 #import "WebRTC/RTCVideoFrame.h"
-#include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/optional.h"

 // |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
@@ -62,7 +61,7 @@ static const char kI420FragmentShaderSource[] =
   GLint _vSampler;
   // Store current rotation and only upload new vertex data when rotation
   // changes.
-  rtc::Optional<webrtc::VideoRotation> _currentRotation;
+  rtc::Optional<RTCVideoRotation> _currentRotation;
   // Used to create a non-padded plane for GPU upload when we receive padded
   // frames.
   std::vector<uint8_t> _planeBuffer;
@@ -126,8 +125,7 @@ static const char kI420FragmentShaderSource[] =
 #endif
   glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
   if (!_currentRotation || frame.rotation != *_currentRotation) {
-    _currentRotation = rtc::Optional<webrtc::VideoRotation>(
-        static_cast<webrtc::VideoRotation>(frame.rotation));
+    _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation);
     RTCSetVertexData(*_currentRotation);
   }
   glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
@@ -188,32 +186,34 @@ static const char kI420FragmentShaderSource[] =
   GLint textureOffset = _currentTextureSet * 3;
   NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");
-  if (frame.yPitch != static_cast<int32_t>(frame.width) ||
-      frame.uPitch != static_cast<int32_t>(frame.chromaWidth) ||
-      frame.vPitch != static_cast<int32_t>(frame.chromaWidth)) {
+  const int chromaWidth = (frame.width + 1) / 2;
+  const int chromaHeight = (frame.height + 1) / 2;
+  if (frame.strideY != frame.width ||
+      frame.strideU != chromaWidth ||
+      frame.strideV != chromaWidth) {
     _planeBuffer.resize(frame.width * frame.height);
   }
-  [self uploadPlane:frame.yPlane
+  [self uploadPlane:frame.dataY
             sampler:_ySampler
              offset:textureOffset
               width:frame.width
              height:frame.height
-             stride:frame.yPitch];
+             stride:frame.strideY];
-  [self uploadPlane:frame.uPlane
+  [self uploadPlane:frame.dataU
             sampler:_uSampler
              offset:textureOffset + 1
-              width:frame.chromaWidth
-             height:frame.chromaHeight
-             stride:frame.uPitch];
+              width:chromaWidth
+             height:chromaHeight
+             stride:frame.strideU];
-  [self uploadPlane:frame.vPlane
+  [self uploadPlane:frame.dataV
             sampler:_vSampler
              offset:textureOffset + 2
-              width:frame.chromaWidth
-             height:frame.chromaHeight
-             stride:frame.vPitch];
+              width:chromaWidth
+             height:chromaHeight
+             stride:frame.strideV];
   _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
   return YES;

View File

@@ -19,7 +19,6 @@
 #import "RTCShader+Private.h"
 #import "WebRTC/RTCVideoFrame.h"
-#include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/optional.h"
@@ -50,7 +49,7 @@ static const char kNV12FragmentShaderSource[] =
   CVOpenGLESTextureCacheRef _textureCache;
   // Store current rotation and only upload new vertex data when rotation
   // changes.
-  rtc::Optional<webrtc::VideoRotation> _currentRotation;
+  rtc::Optional<RTCVideoRotation> _currentRotation;
 }

 - (instancetype)initWithContext:(GlContextType *)context {
@@ -155,8 +154,7 @@ static const char kNV12FragmentShaderSource[] =
   glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
   if (!_currentRotation || frame.rotation != *_currentRotation) {
-    _currentRotation = rtc::Optional<webrtc::VideoRotation>(
-        static_cast<webrtc::VideoRotation>(frame.rotation));
+    _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation);
     RTCSetVertexData(*_currentRotation);
   }
   glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

View File

@@ -48,16 +48,20 @@
     if (!_nv12Shader) {
       _nv12Shader = [[RTCNativeNV12Shader alloc] initWithContext:_context];
     }
     shader = _nv12Shader;
-#else
-  // Rendering native CVPixelBuffer is not supported on OS X.
-  if (false) {
-#endif
   } else {
     if (!_i420Shader) {
       _i420Shader = [[RTCI420Shader alloc] initWithContext:_context];
     }
     shader = _i420Shader;
   }
+#else
+  // Rendering native CVPixelBuffer is not supported on OS X.
+  frame = [frame newI420VideoFrame];
+  if (!_i420Shader) {
+    _i420Shader = [[RTCI420Shader alloc] initWithContext:_context];
+  }
+  shader = _i420Shader;
+#endif
   if (!shader || ![shader drawFrame:frame]) {
     return NO;
   }

View File

@@ -11,6 +11,7 @@
 #import "RTCShader.h"
 #import "WebRTC/RTCMacros.h"
+#import "WebRTC/RTCVideoFrame.h"

 #if TARGET_OS_IPHONE
 #import <OpenGLES/ES3/gl.h>
@@ -27,4 +28,4 @@ RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader);
 RTC_EXTERN GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]);
 RTC_EXTERN BOOL RTCSetupVerticesForProgram(
     GLuint program, GLuint* vertexBuffer, GLuint* vertexArray);
-RTC_EXTERN void RTCSetVertexData(webrtc::VideoRotation rotation);
+RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation);

View File

@@ -138,7 +138,7 @@ BOOL RTCSetupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* ve
 }

 // Set vertex data to the currently bound vertex buffer.
-void RTCSetVertexData(webrtc::VideoRotation rotation) {
+void RTCSetVertexData(RTCVideoRotation rotation) {
   // When modelview and projection matrices are identity (default) the world is
   // contained in the square around origin with unit size 2. Drawing to these
   // coordinates is equivalent to drawing to the entire screen. The texture is
@@ -156,16 +156,16 @@ void RTCSetVertexData(webrtc::VideoRotation rotation) {
   // Rotate the UV coordinates.
   int rotation_offset;
   switch (rotation) {
-    case webrtc::kVideoRotation_0:
+    case RTCVideoRotation_0:
       rotation_offset = 0;
       break;
-    case webrtc::kVideoRotation_90:
+    case RTCVideoRotation_90:
       rotation_offset = 1;
       break;
-    case webrtc::kVideoRotation_180:
+    case RTCVideoRotation_180:
       rotation_offset = 2;
       break;
-    case webrtc::kVideoRotation_270:
+    case RTCVideoRotation_270:
       rotation_offset = 3;
       break;
   }

View File

@@ -11,18 +11,14 @@
 #import "WebRTC/RTCVideoFrame.h"

 #include "webrtc/api/video/video_frame_buffer.h"
-#include "webrtc/api/video/video_rotation.h"

 NS_ASSUME_NONNULL_BEGIN

 @interface RTCVideoFrame ()

-@property(nonatomic, readonly)
-    rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer;
-
 - (instancetype)initWithVideoBuffer:
         (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
-           rotation:(webrtc::VideoRotation)rotation
+           rotation:(RTCVideoRotation)rotation
            timeStampNs:(int64_t)timeStampNs
     NS_DESIGNATED_INITIALIZER;

View File

@@ -10,80 +10,46 @@
 #import "RTCVideoFrame+Private.h"

 #include <memory>
-#include "webrtc/api/video/video_rotation.h"

 @implementation RTCVideoFrame {
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;
-  webrtc::VideoRotation _rotation;
+  RTCVideoRotation _rotation;
   int64_t _timeStampNs;
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> _i420Buffer;
 }

-- (size_t)width {
+- (int)width {
   return _videoBuffer->width();
 }

-- (size_t)height {
+- (int)height {
   return _videoBuffer->height();
 }

-- (int)rotation {
-  return static_cast<int>(_rotation);
+- (RTCVideoRotation)rotation {
+  return _rotation;
 }

-// TODO(nisse): chromaWidth and chromaHeight are used only in
-// RTCOpenGLVideoRenderer.mm. Update, and then delete these
-// properties.
-- (size_t)chromaWidth {
-  return (self.width + 1) / 2;
+- (const uint8_t *)dataY {
+  return _videoBuffer->DataY();
 }

-- (size_t)chromaHeight {
-  return (self.height + 1) / 2;
+- (const uint8_t *)dataU {
+  return _videoBuffer->DataU();
 }

-- (const uint8_t *)yPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataY();
+- (const uint8_t *)dataV {
+  return _videoBuffer->DataV();
 }

-- (const uint8_t *)uPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataU();
+- (int)strideY {
+  return _videoBuffer->StrideY();
 }

-- (const uint8_t *)vPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataV();
+- (int)strideU {
+  return _videoBuffer->StrideU();
 }

-- (int32_t)yPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideY();
-}
-
-- (int32_t)uPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideU();
-}
-
-- (int32_t)vPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideV();
+- (int)strideV {
+  return _videoBuffer->StrideV();
 }

 - (int64_t)timeStampNs {
@@ -94,19 +60,18 @@
   return static_cast<CVPixelBufferRef>(_videoBuffer->native_handle());
 }

-- (void)convertBufferIfNeeded {
-  if (!_i420Buffer) {
-    _i420Buffer = _videoBuffer->native_handle()
-                      ? _videoBuffer->NativeToI420Buffer()
-                      : _videoBuffer;
-  }
+- (RTCVideoFrame *)newI420VideoFrame {
+  return [[RTCVideoFrame alloc]
+      initWithVideoBuffer:_videoBuffer->NativeToI420Buffer()
+                 rotation:_rotation
+              timeStampNs:_timeStampNs];
 }

 #pragma mark - Private

 - (instancetype)initWithVideoBuffer:
         (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
-               rotation:(webrtc::VideoRotation)rotation
+               rotation:(RTCVideoRotation)rotation
             timeStampNs:(int64_t)timeStampNs {
   if (self = [super init]) {
     _videoBuffer = videoBuffer;
@@ -116,9 +81,4 @@
   return self;
 }

-- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)i420Buffer {
-  [self convertBufferIfNeeded];
-  return _i420Buffer;
-}
-
 @end

View File

@@ -27,7 +27,8 @@ class VideoRendererAdapter
   void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
     RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
         initWithVideoBuffer:nativeVideoFrame.video_frame_buffer()
-                   rotation:nativeVideoFrame.rotation()
+                   rotation:static_cast<RTCVideoRotation>(
+                                nativeVideoFrame.rotation())
                 timeStampNs:nativeVideoFrame.timestamp_us() *
                             rtc::kNumNanosecsPerMicrosec];
     CGSize current_size = (videoFrame.rotation % 180 == 0)
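
Since RTCVideoRotation is an NS_ENUM backed by NSInteger, arithmetic on
its raw value, like the % 180 check above, still works. A minimal sketch
of that pattern as a standalone helper (hypothetical, not part of this
change):

  #import <CoreGraphics/CoreGraphics.h>
  #import "WebRTC/RTCVideoFrame.h"

  // Hypothetical helper mirroring the adapter's check above: frames
  // rotated by 90 or 270 degrees swap display width and height.
  static CGSize RTCRotatedFrameSize(RTCVideoFrame *frame) {
    BOOL swapsDimensions = (frame.rotation % 180) != 0;
    return swapsDimensions ? CGSizeMake(frame.height, frame.width)
                           : CGSizeMake(frame.width, frame.height);
  }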

View File

@@ -15,25 +15,33 @@
 NS_ASSUME_NONNULL_BEGIN

+typedef NS_ENUM(NSInteger, RTCVideoRotation) {
+  RTCVideoRotation_0 = 0,
+  RTCVideoRotation_90 = 90,
+  RTCVideoRotation_180 = 180,
+  RTCVideoRotation_270 = 270,
+};
+
 // RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame.
 RTC_EXPORT
 @interface RTCVideoFrame : NSObject

 /** Width without rotation applied. */
-@property(nonatomic, readonly) size_t width;
+@property(nonatomic, readonly) int width;

 /** Height without rotation applied. */
-@property(nonatomic, readonly) size_t height;
-@property(nonatomic, readonly) int rotation;
-@property(nonatomic, readonly) size_t chromaWidth;
-@property(nonatomic, readonly) size_t chromaHeight;
-// These can return NULL if the object is not backed by a buffer.
-@property(nonatomic, readonly, nullable) const uint8_t *yPlane;
-@property(nonatomic, readonly, nullable) const uint8_t *uPlane;
-@property(nonatomic, readonly, nullable) const uint8_t *vPlane;
-@property(nonatomic, readonly) int32_t yPitch;
-@property(nonatomic, readonly) int32_t uPitch;
-@property(nonatomic, readonly) int32_t vPitch;
+@property(nonatomic, readonly) int height;
+@property(nonatomic, readonly) RTCVideoRotation rotation;
+
+/** Accessing YUV data should only be done for I420 frames, i.e. if nativeHandle
+ *  is null. It is always possible to get such a frame by calling
+ *  newI420VideoFrame.
+ */
+@property(nonatomic, readonly, nullable) const uint8_t *dataY;
+@property(nonatomic, readonly, nullable) const uint8_t *dataU;
+@property(nonatomic, readonly, nullable) const uint8_t *dataV;
+@property(nonatomic, readonly) int strideY;
+@property(nonatomic, readonly) int strideU;
+@property(nonatomic, readonly) int strideV;

 /** Timestamp in nanoseconds. */
 @property(nonatomic, readonly) int64_t timeStampNs;
@@ -43,10 +51,10 @@ RTC_EXPORT
 - (instancetype)init NS_UNAVAILABLE;

-/** If the frame is backed by a CVPixelBuffer, creates a backing i420 frame.
- *  Calling the yuv plane properties will call this method if needed.
+/** Return a frame that is guaranteed to be I420, i.e. it is possible to access
+ *  the YUV data on it.
  */
-- (void)convertBufferIfNeeded;
+- (RTCVideoFrame *)newI420VideoFrame;

 @end
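
The header comment above defines the new access contract. A short usage
sketch under that contract (the inspection function is hypothetical;
nativeHandle is the CVPixelBuffer accessor the comment refers to):

  #import "WebRTC/RTCVideoFrame.h"

  // Hypothetical consumer: YUV accessors are only meaningful on I420
  // frames, so convert explicitly when the frame wraps a CVPixelBuffer.
  static void InspectFrame(RTCVideoFrame *frame) {
    RTCVideoFrame *i420 =
        frame.nativeHandle ? [frame newI420VideoFrame] : frame;
    const int chromaWidth = (i420.width + 1) / 2;  // same derivation as the I420 shader
    NSLog(@"%dx%d strideY=%d chromaWidth=%d",
          i420.width, i420.height, i420.strideY, chromaWidth);
  }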