Support more formats in RTCVideoFrame

Implement Obj-C version of webrtc::VideoFrameBuffer and use that in
RTCVideoFrame.

Bug: webrtc:7785
Change-Id: I49f42bcf451dd6769b3a79a65fe7b400dce22677
Reviewed-on: https://chromium-review.googlesource.com/536773
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18691}
This commit is contained in:
Anders Carlsson
2017-06-20 11:01:34 +02:00
committed by Commit Bot
parent 7f84aeaef6
commit bd2220a9c4
24 changed files with 783 additions and 169 deletions

View File

@@ -12,6 +12,7 @@
#import "WebRTC/RTCCameraVideoCapturer.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
@@ -191,11 +192,12 @@ static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
return;
}
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
kNanosecondsPerSecond;
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}

View File

@@ -11,6 +11,7 @@
#import "RTCFileVideoCapturer.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
@implementation RTCFileVideoCapturer {
AVAssetReader *_reader;
@@ -133,10 +134,11 @@
return;
}
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
NSTimeInterval timeStampSeconds = CACurrentMediaTime();
int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
RTCVideoFrame *videoFrame =
[[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer rotation:0 timeStampNs:timeStampNs];
[[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs];
CFRelease(sampleBuffer);
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{

View File

@@ -1,29 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrame.h"
#include "webrtc/api/video/video_frame_buffer.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCVideoFrame ()
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer;
- (instancetype)initWithVideoBuffer:
(rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs
NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END

View File

@@ -8,22 +8,22 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCVideoFrame+Private.h"
#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
#import "webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h"
#import "webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h"
@implementation RTCVideoFrame {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;
RTCVideoRotation _rotation;
int64_t _timeStampNs;
}
@synthesize buffer = _buffer;
- (int)width {
return _videoBuffer->width();
return _buffer.width;
}
- (int)height {
return _videoBuffer->height();
return _buffer.height;
}
- (RTCVideoRotation)rotation {
@@ -31,27 +31,51 @@
}
- (const uint8_t *)dataY {
return _videoBuffer->GetI420()->DataY();
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).dataY;
} else {
return nullptr;
}
}
- (const uint8_t *)dataU {
return _videoBuffer->GetI420()->DataU();
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).dataU;
} else {
return nullptr;
}
}
- (const uint8_t *)dataV {
return _videoBuffer->GetI420()->DataV();
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).dataV;
} else {
return nullptr;
}
}
- (int)strideY {
return _videoBuffer->GetI420()->StrideY();
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).strideY;
} else {
return 0;
}
}
- (int)strideU {
return _videoBuffer->GetI420()->StrideU();
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).strideU;
} else {
return 0;
}
}
- (int)strideV {
return _videoBuffer->GetI420()->StrideV();
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).strideV;
} else {
return 0;
}
}
- (int64_t)timeStampNs {
@@ -59,26 +83,25 @@
}
- (CVPixelBufferRef)nativeHandle {
return (_videoBuffer->type() == webrtc::VideoFrameBuffer::Type::kNative) ?
static_cast<webrtc::CoreVideoFrameBuffer *>(_videoBuffer.get())->pixel_buffer() :
nil;
if ([_buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
return ((RTCCVPixelBuffer *)_buffer).pixelBuffer;
} else {
return nullptr;
}
}
- (RTCVideoFrame *)newI420VideoFrame {
return [[RTCVideoFrame alloc]
initWithVideoBuffer:_videoBuffer->ToI420()
rotation:_rotation
timeStampNs:_timeStampNs];
return [[RTCVideoFrame alloc] initWithBuffer:[_buffer toI420]
rotation:_rotation
timeStampNs:_timeStampNs];
}
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer(
new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixelBuffer));
return [self initWithVideoBuffer:videoBuffer
rotation:rotation
timeStampNs:timeStampNs];
return [self initWithBuffer:[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]
rotation:rotation
timeStampNs:timeStampNs];
}
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
@@ -90,33 +113,26 @@
cropY:(int)cropY
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer(
new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(
pixelBuffer,
scaledWidth, scaledHeight,
cropWidth, cropHeight,
cropX, cropY));
return [self initWithVideoBuffer:videoBuffer
rotation:rotation
timeStampNs:timeStampNs];
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer
adaptedWidth:scaledWidth
adaptedHeight:scaledHeight
cropWidth:cropWidth
cropHeight:cropHeight
cropX:cropX
cropY:cropY];
return [self initWithBuffer:rtcPixelBuffer rotation:rotation timeStampNs:timeStampNs];
}
#pragma mark - Private
- (instancetype)initWithVideoBuffer:
(rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)buffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
if (self = [super init]) {
_videoBuffer = videoBuffer;
_buffer = buffer;
_rotation = rotation;
_timeStampNs = timeStampNs;
}
return self;
}
- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer {
return _videoBuffer;
}
@end

View File

@@ -8,9 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCI420Buffer+Private.h"
#import "RTCVideoRendererAdapter+Private.h"
#import "RTCVideoFrame+Private.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#import "objc_frame_buffer.h"
#include <memory>
@@ -25,12 +27,20 @@ class VideoRendererAdapter
}
void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer = nativeVideoFrame.video_frame_buffer();
id<RTCVideoFrameBuffer> rtc_frame_buffer;
if (video_frame_buffer->type() == VideoFrameBuffer::Type::kNative) {
rtc::scoped_refptr<ObjCFrameBuffer> objc_frame_buffer(
static_cast<ObjCFrameBuffer*>(video_frame_buffer.get()));
rtc_frame_buffer = (id<RTCVideoFrameBuffer>)objc_frame_buffer->wrapped_frame_buffer();
} else {
rtc_frame_buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:video_frame_buffer->ToI420()];
}
RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
initWithVideoBuffer:nativeVideoFrame.video_frame_buffer()
rotation:static_cast<RTCVideoRotation>(
nativeVideoFrame.rotation())
timeStampNs:nativeVideoFrame.timestamp_us() *
rtc::kNumNanosecsPerMicrosec];
initWithBuffer:rtc_frame_buffer
rotation:static_cast<RTCVideoRotation>(nativeVideoFrame.rotation())
timeStampNs:nativeVideoFrame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
CGSize current_size = (videoFrame.rotation % 180 == 0)
? CGSizeMake(videoFrame.width, videoFrame.height)
: CGSizeMake(videoFrame.height, videoFrame.width);