Clean up RTCVideoFrame

RTCVideoFrame is an Objective-C version of webrtc::VideoFrame, but it
currently contains some extra logic beyond that. We want RTCVideoFrame
to be as simple as possible, i.e. just a container with no extra state,
so we can use it as input to RTCVideoSource without complicating the
interface for consumers.

BUG=webrtc:7177
NOTRY=True

Review-Url: https://codereview.webrtc.org/2695203004
Cr-Commit-Position: refs/heads/master@{#16740}
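
For illustration, a minimal consumer-side sketch (not part of this commit) of the slimmed-down interface, using only the accessors implemented in the diff below (width/height as int, rotation as RTCVideoRotation, timeStampNs); the helper name and the framework-style import are assumptions:

  #import <WebRTC/RTCVideoFrame.h>  // assumed umbrella import; adjust to the local include path

  // Hypothetical helper: reads plain values straight off the frame,
  // with no conversion state held by RTCVideoFrame itself.
  static void LogFrameInfo(RTCVideoFrame *frame) {
    NSLog(@"frame: %dx%d rotation=%ld ts=%lld ns",
          [frame width], [frame height], (long)[frame rotation], [frame timeStampNs]);
  }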
Author: magjed
Date: 2017-02-21 04:19:46 -08:00
Committed by: Commit bot
Parent: a518a39963
Commit: 7ee512581c
10 changed files with 84 additions and 122 deletions

@@ -10,80 +10,46 @@
 #import "RTCVideoFrame+Private.h"
 
 #include <memory>
 
-#include "webrtc/api/video/video_rotation.h"
-
 @implementation RTCVideoFrame {
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;
-  webrtc::VideoRotation _rotation;
+  RTCVideoRotation _rotation;
   int64_t _timeStampNs;
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> _i420Buffer;
 }
 
-- (size_t)width {
+- (int)width {
   return _videoBuffer->width();
 }
 
-- (size_t)height {
+- (int)height {
   return _videoBuffer->height();
 }
 
-- (int)rotation {
-  return static_cast<int>(_rotation);
+- (RTCVideoRotation)rotation {
+  return _rotation;
 }
 
-// TODO(nisse): chromaWidth and chromaHeight are used only in
-// RTCOpenGLVideoRenderer.mm. Update, and then delete these
-// properties.
-- (size_t)chromaWidth {
-  return (self.width + 1) / 2;
+- (const uint8_t *)dataY {
+  return _videoBuffer->DataY();
 }
 
-- (size_t)chromaHeight {
-  return (self.height + 1) / 2;
+- (const uint8_t *)dataU {
+  return _videoBuffer->DataU();
 }
 
-- (const uint8_t *)yPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataY();
+- (const uint8_t *)dataV {
+  return _videoBuffer->DataV();
 }
 
-- (const uint8_t *)uPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataU();
+- (int)strideY {
+  return _videoBuffer->StrideY();
 }
 
-- (const uint8_t *)vPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataV();
+- (int)strideU {
+  return _videoBuffer->StrideU();
 }
 
-- (int32_t)yPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideY();
-}
-
-- (int32_t)uPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideU();
-}
-
-- (int32_t)vPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideV();
+- (int)strideV {
+  return _videoBuffer->StrideV();
 }
 
 - (int64_t)timeStampNs {
@@ -94,19 +60,18 @@
   return static_cast<CVPixelBufferRef>(_videoBuffer->native_handle());
 }
 
-- (void)convertBufferIfNeeded {
-  if (!_i420Buffer) {
-    _i420Buffer = _videoBuffer->native_handle()
-                      ? _videoBuffer->NativeToI420Buffer()
-                      : _videoBuffer;
-  }
+- (RTCVideoFrame *)newI420VideoFrame {
+  return [[RTCVideoFrame alloc]
+      initWithVideoBuffer:_videoBuffer->NativeToI420Buffer()
+                 rotation:_rotation
+              timeStampNs:_timeStampNs];
 }
 
 #pragma mark - Private
 
 - (instancetype)initWithVideoBuffer:
         (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
-               rotation:(webrtc::VideoRotation)rotation
+               rotation:(RTCVideoRotation)rotation
            timeStampNs:(int64_t)timeStampNs {
   if (self = [super init]) {
     _videoBuffer = videoBuffer;
@@ -116,9 +81,4 @@
   return self;
 }
 
-- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)i420Buffer {
-  [self convertBufferIfNeeded];
-  return _i420Buffer;
-}
-
 @end
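
To illustrate the replacement for the removed convertBufferIfNeeded/i420Buffer state, a hedged caller-side sketch (not from this commit): a consumer that needs raw I420 planes now converts explicitly and reads the plane accessors from the returned frame, leaving the original frame untouched:

  // Hypothetical usage, assuming `frame` may be backed by a native CVPixelBuffer.
  RTCVideoFrame *i420Frame = [frame newI420VideoFrame];  // converts via NativeToI420Buffer()
  const uint8_t *y = [i420Frame dataY];
  const uint8_t *u = [i420Frame dataU];
  const uint8_t *v = [i420Frame dataV];
  int strideY = [i420Frame strideY];
  // Rotation and timeStampNs carry over from the source frame unchanged.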