Support more formats in RTCVideoFrame

Implement Obj-C version of webrtc::VideoFrameBuffer and use that in
RTCVideoFrame.

Bug: webrtc:7785
Change-Id: I49f42bcf451dd6769b3a79a65fe7b400dce22677
Reviewed-on: https://chromium-review.googlesource.com/536773
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18691}
Author: Anders Carlsson <andersc@webrtc.org>
Date: 2017-06-20 11:01:34 +02:00
Committed by: Commit Bot
Parent: 7f84aeaef6
Commit: bd2220a9c4

24 changed files with 783 additions and 169 deletions
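For orientation, the shape of the new API: capturers wrap a CVPixelBufferRef in an RTCCVPixelBuffer and hand it to RTCVideoFrame, and consumers either take the pixel-buffer fast path or convert to I420. A minimal sketch against the headers added in this CL (the function names here are illustrative, not part of the SDK):

#import <WebRTC/RTCVideoFrame.h>
#import <WebRTC/RTCVideoFrameBuffer.h>

// Capture side: wrap the CVPixelBuffer instead of passing it to the
// now-deprecated initWithPixelBuffer: initializer.
static RTCVideoFrame *FrameFromPixelBuffer(CVPixelBufferRef pixelBuffer,
                                           int64_t timeStampNs) {
  RTCCVPixelBuffer *rtcBuffer =
      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
  return [[RTCVideoFrame alloc] initWithBuffer:rtcBuffer
                                      rotation:RTCVideoRotation_0
                                   timeStampNs:timeStampNs];
}

// Consumer side: keep the NV12 fast path for pixel-buffer-backed frames
// and fall back to I420 planes for everything else.
static void ConsumeFrame(RTCVideoFrame *frame) {
  if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
    CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
    (void)pixelBuffer;  // e.g. upload the NV12 planes directly to textures
  } else {
    id<RTCI420Buffer> i420 = [frame.buffer toI420];
    (void)i420;  // e.g. read i420.dataY with i420.strideY
  }
}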

File: webrtc/sdk/BUILD.gn

@@ -105,8 +105,11 @@ if (is_ios || is_mac) {
     sources = [
       "objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.h",
       "objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm",
+      "objc/Framework/Classes/Video/RTCCVPixelBuffer.mm",
       "objc/Framework/Classes/Video/RTCDefaultShader.h",
       "objc/Framework/Classes/Video/RTCDefaultShader.mm",
+      "objc/Framework/Classes/Video/RTCI420Buffer+Private.h",
+      "objc/Framework/Classes/Video/RTCI420Buffer.mm",
       "objc/Framework/Classes/Video/RTCI420TextureCache.h",
       "objc/Framework/Classes/Video/RTCI420TextureCache.mm",
       "objc/Framework/Classes/Video/RTCOpenGLDefines.h",
@@ -116,8 +119,11 @@ if (is_ios || is_mac) {
       "objc/Framework/Classes/Video/avfoundationformatmapper.mm",
       "objc/Framework/Classes/Video/avfoundationvideocapturer.h",
       "objc/Framework/Classes/Video/avfoundationvideocapturer.mm",
+      "objc/Framework/Classes/Video/objc_frame_buffer.h",
+      "objc/Framework/Classes/Video/objc_frame_buffer.mm",
       "objc/Framework/Classes/Video/objcvideotracksource.h",
       "objc/Framework/Classes/Video/objcvideotracksource.mm",
+      "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
     ]
     libs = []
     if (is_ios) {
@@ -142,8 +148,6 @@ if (is_ios || is_mac) {
     deps = [
       ":objc_common",
-      ":objc_corevideoframebuffer",
-      ":objc_videotoolbox",
       "../api:libjingle_peerconnection_api",
       "../base:rtc_base",
       "../common_video",
@@ -270,7 +274,6 @@ if (is_ios || is_mac) {
       "objc/Framework/Classes/PeerConnection/RTCSessionDescription.mm",
       "objc/Framework/Classes/PeerConnection/RTCTracing.mm",
       "objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m",
-      "objc/Framework/Classes/PeerConnection/RTCVideoFrame+Private.h",
       "objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm",
       "objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter+Private.h",
       "objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.h",
@@ -307,6 +310,7 @@ if (is_ios || is_mac) {
       "objc/Framework/Headers/WebRTC/RTCTracing.h",
       "objc/Framework/Headers/WebRTC/RTCVideoCapturer.h",
       "objc/Framework/Headers/WebRTC/RTCVideoFrame.h",
+      "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
       "objc/Framework/Headers/WebRTC/RTCVideoRenderer.h",
       "objc/Framework/Headers/WebRTC/RTCVideoSource.h",
       "objc/Framework/Headers/WebRTC/RTCVideoTrack.h",
@@ -335,10 +339,11 @@ if (is_ios || is_mac) {
     deps = [
       ":objc_common",
-      ":objc_corevideoframebuffer",
       ":objc_video",
+      ":objc_videotoolbox",
       "../api:video_frame_api",
       "../base:rtc_base",
+      "../common_video",
       "../media:rtc_media_base",
       "../pc:libjingle_peerconnection",
     ]
@@ -454,6 +459,7 @@ if (is_ios || is_mac) {
       "objc/Framework/Headers/WebRTC/RTCTracing.h",
       "objc/Framework/Headers/WebRTC/RTCVideoCapturer.h",
       "objc/Framework/Headers/WebRTC/RTCVideoFrame.h",
+      "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
       "objc/Framework/Headers/WebRTC/RTCVideoRenderer.h",
       "objc/Framework/Headers/WebRTC/RTCVideoSource.h",
       "objc/Framework/Headers/WebRTC/RTCVideoTrack.h",
@@ -545,13 +551,14 @@ if (is_ios || is_mac) {
       "objc/Framework/Classes/VideoToolbox/nalu_rewriter.h",
       "objc/Framework/Classes/VideoToolbox/videocodecfactory.h",
       "objc/Framework/Classes/VideoToolbox/videocodecfactory.mm",
+      "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
     ]
     configs += [ "..:common_objc" ]
     deps = [
       ":objc_common",
-      ":objc_corevideoframebuffer",
+      ":objc_video",
       "../base:rtc_base_approved",
       "../common_video",
       "../media:rtc_media",

File: RTCMTLI420Renderer.mm (Metal I420 renderer)

@@ -9,6 +9,7 @@
  */

 #import "RTCMTLI420Renderer.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"

 #import <Metal/Metal.h>
 #import <MetalKit/MetalKit.h>
@@ -96,6 +97,8 @@ static NSString *const shaderSource = MTL_STRINGIFY(
     return NO;
   }

+  id<RTCI420Buffer> buffer = [frame.buffer toI420];
+
   // Luma (y) texture.
   if (!_descriptor || (_width != frame.width && _height != frame.height)) {
     _width = frame.width;
@@ -111,8 +114,8 @@ static NSString *const shaderSource = MTL_STRINGIFY(
   // Chroma (u,v) textures
   [_yTexture replaceRegion:MTLRegionMake2D(0, 0, _width, _height)
               mipmapLevel:0
-                withBytes:frame.dataY
-              bytesPerRow:frame.strideY];
+                withBytes:buffer.dataY
+              bytesPerRow:buffer.strideY];

   if (!_chromaDescriptor ||
       (_chromaWidth != frame.width / 2 && _chromaHeight != frame.height / 2)) {
@@ -130,12 +133,12 @@ static NSString *const shaderSource = MTL_STRINGIFY(
   [_uTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
               mipmapLevel:0
-                withBytes:frame.dataU
-              bytesPerRow:frame.strideU];
+                withBytes:buffer.dataU
+              bytesPerRow:buffer.strideU];
   [_vTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
               mipmapLevel:0
-                withBytes:frame.dataV
-              bytesPerRow:frame.strideV];
+                withBytes:buffer.dataV
+              bytesPerRow:buffer.strideV];

   return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil);
 }

File: RTCMTLNV12Renderer.mm (Metal NV12 renderer)

@@ -15,6 +15,7 @@

 #import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"

 #import "RTCMTLRenderer+Private.h"
@@ -85,7 +86,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
   [super setupTexturesForFrame:frame];
-  CVPixelBufferRef pixelBuffer = frame.nativeHandle;
+  CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;

   id<MTLTexture> lumaTexture = nil;
   id<MTLTexture> chromaTexture = nil;

File: RTCMTLVideoView.m

@@ -15,6 +15,7 @@

 #import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"

 #import "RTCMTLI420Renderer.h"
 #import "RTCMTLNV12Renderer.h"
@@ -108,7 +109,7 @@
   }

   id<RTCMTLRenderer> renderer = nil;
-  if (self.videoFrame.nativeHandle) {
+  if ([self.videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
     if (!self.rendererNV12) {
       self.rendererNV12 = [RTCMTLVideoView createNV12Renderer];
       if (![self.rendererNV12 addRenderingDestination:self.metalView]) {

File: RTCCameraVideoCapturer.m

@@ -12,6 +12,7 @@

 #import "WebRTC/RTCCameraVideoCapturer.h"
 #import "WebRTC/RTCLogging.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"

 #if TARGET_OS_IPHONE
 #import "WebRTC/UIDevice+RTCDevice.h"
@@ -191,9 +192,10 @@ static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
     return;
   }

+  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
   int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
       kNanosecondsPerSecond;
-  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
+  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                                 rotation:_rotation
                                                              timeStampNs:timeStampNs];
   [self.delegate capturer:self didCaptureVideoFrame:videoFrame];

File: RTCFileVideoCapturer.m

@@ -11,6 +11,7 @@
 #import "RTCFileVideoCapturer.h"

 #import "WebRTC/RTCLogging.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"

 @implementation RTCFileVideoCapturer {
   AVAssetReader *_reader;
@@ -133,10 +134,11 @@
     return;
   }

+  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
   NSTimeInterval timeStampSeconds = CACurrentMediaTime();
   int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
   RTCVideoFrame *videoFrame =
-      [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer rotation:0 timeStampNs:timeStampNs];
+      [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs];
   CFRelease(sampleBuffer);

   dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{

File: webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame+Private.h (deleted)

@@ -1,29 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrame.h"
#include "webrtc/api/video/video_frame_buffer.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCVideoFrame ()
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer;
- (instancetype)initWithVideoBuffer:
(rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs
NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END

File: webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm

@@ -8,22 +8,22 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

-#import "RTCVideoFrame+Private.h"
-
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#import "webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h"
+#import "webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h"

 @implementation RTCVideoFrame {
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;
   RTCVideoRotation _rotation;
   int64_t _timeStampNs;
 }

+@synthesize buffer = _buffer;
+
 - (int)width {
-  return _videoBuffer->width();
+  return _buffer.width;
 }

 - (int)height {
-  return _videoBuffer->height();
+  return _buffer.height;
 }

 - (RTCVideoRotation)rotation {
@@ -31,27 +31,51 @@
 }

 - (const uint8_t *)dataY {
-  return _videoBuffer->GetI420()->DataY();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).dataY;
+  } else {
+    return nullptr;
+  }
 }

 - (const uint8_t *)dataU {
-  return _videoBuffer->GetI420()->DataU();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).dataU;
+  } else {
+    return nullptr;
+  }
 }

 - (const uint8_t *)dataV {
-  return _videoBuffer->GetI420()->DataV();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).dataV;
+  } else {
+    return nullptr;
+  }
 }

 - (int)strideY {
-  return _videoBuffer->GetI420()->StrideY();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).strideY;
+  } else {
+    return 0;
+  }
 }

 - (int)strideU {
-  return _videoBuffer->GetI420()->StrideU();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).strideU;
+  } else {
+    return 0;
+  }
 }

 - (int)strideV {
-  return _videoBuffer->GetI420()->StrideV();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).strideV;
+  } else {
+    return 0;
+  }
 }

 - (int64_t)timeStampNs {
@@ -59,14 +83,15 @@
 }

 - (CVPixelBufferRef)nativeHandle {
-  return (_videoBuffer->type() == webrtc::VideoFrameBuffer::Type::kNative) ?
-      static_cast<webrtc::CoreVideoFrameBuffer *>(_videoBuffer.get())->pixel_buffer() :
-      nil;
+  if ([_buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+    return ((RTCCVPixelBuffer *)_buffer).pixelBuffer;
+  } else {
+    return nullptr;
+  }
 }

 - (RTCVideoFrame *)newI420VideoFrame {
-  return [[RTCVideoFrame alloc]
-      initWithVideoBuffer:_videoBuffer->ToI420()
-                 rotation:_rotation
-              timeStampNs:_timeStampNs];
+  return [[RTCVideoFrame alloc] initWithBuffer:[_buffer toI420]
+                                      rotation:_rotation
+                                   timeStampNs:_timeStampNs];
 }
@@ -74,9 +99,7 @@
 - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
                            rotation:(RTCVideoRotation)rotation
                         timeStampNs:(int64_t)timeStampNs {
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer(
-      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixelBuffer));
-  return [self initWithVideoBuffer:videoBuffer
+  return [self initWithBuffer:[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]
                            rotation:rotation
                         timeStampNs:timeStampNs];
 }
@@ -90,33 +113,26 @@
                               cropY:(int)cropY
                            rotation:(RTCVideoRotation)rotation
                         timeStampNs:(int64_t)timeStampNs {
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer(
-      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(
-          pixelBuffer,
-          scaledWidth, scaledHeight,
-          cropWidth, cropHeight,
-          cropX, cropY));
-  return [self initWithVideoBuffer:videoBuffer
-                          rotation:rotation
-                       timeStampNs:timeStampNs];
+  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer
+                                                                      adaptedWidth:scaledWidth
+                                                                     adaptedHeight:scaledHeight
+                                                                         cropWidth:cropWidth
+                                                                        cropHeight:cropHeight
+                                                                             cropX:cropX
+                                                                             cropY:cropY];
+  return [self initWithBuffer:rtcPixelBuffer rotation:rotation timeStampNs:timeStampNs];
 }

-#pragma mark - Private
-
-- (instancetype)initWithVideoBuffer:
-    (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
+- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)buffer
                       rotation:(RTCVideoRotation)rotation
                    timeStampNs:(int64_t)timeStampNs {
   if (self = [super init]) {
-    _videoBuffer = videoBuffer;
+    _buffer = buffer;
     _rotation = rotation;
     _timeStampNs = timeStampNs;
   }
   return self;
 }

-- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer {
-  return _videoBuffer;
-}
-
 @end

File: webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.mm

@@ -8,9 +8,11 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

+#import "RTCI420Buffer+Private.h"
 #import "RTCVideoRendererAdapter+Private.h"
-#import "RTCVideoFrame+Private.h"
+#import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
+#import "objc_frame_buffer.h"

 #include <memory>
@@ -25,12 +27,20 @@ class VideoRendererAdapter
   }

   void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
+    rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer = nativeVideoFrame.video_frame_buffer();
+    id<RTCVideoFrameBuffer> rtc_frame_buffer;
+    if (video_frame_buffer->type() == VideoFrameBuffer::Type::kNative) {
+      rtc::scoped_refptr<ObjCFrameBuffer> objc_frame_buffer(
+          static_cast<ObjCFrameBuffer*>(video_frame_buffer.get()));
+      rtc_frame_buffer = (id<RTCVideoFrameBuffer>)objc_frame_buffer->wrapped_frame_buffer();
+    } else {
+      rtc_frame_buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:video_frame_buffer->ToI420()];
+    }
     RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
-        initWithVideoBuffer:nativeVideoFrame.video_frame_buffer()
-                   rotation:static_cast<RTCVideoRotation>(
-                                nativeVideoFrame.rotation())
-                timeStampNs:nativeVideoFrame.timestamp_us() *
-                            rtc::kNumNanosecsPerMicrosec];
+        initWithBuffer:rtc_frame_buffer
+              rotation:static_cast<RTCVideoRotation>(nativeVideoFrame.rotation())
+           timeStampNs:nativeVideoFrame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
     CGSize current_size = (videoFrame.rotation % 180 == 0)
                               ? CGSizeMake(videoFrame.width, videoFrame.height)
                               : CGSizeMake(videoFrame.height, videoFrame.width);

File: RTCEAGLVideoView.m

@@ -17,6 +17,7 @@
 #import "RTCNV12TextureCache.h"
 #import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"

 // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
 // refreshes, which should be 30fps. We wrap the display link in order to avoid
@@ -220,7 +221,7 @@
   }
   [self ensureGLContext];
   glClear(GL_COLOR_BUFFER_BIT);
-  if (frame.nativeHandle) {
+  if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
     if (!_nv12TextureCache) {
      _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
     }

File: webrtc/sdk/objc/Framework/Classes/Video/RTCCVPixelBuffer.mm (new)

@@ -0,0 +1,188 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
@implementation RTCCVPixelBuffer {
int _width;
int _height;
int _bufferWidth;
int _bufferHeight;
int _cropWidth;
int _cropHeight;
int _cropX;
int _cropY;
}
@synthesize pixelBuffer = _pixelBuffer;
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer {
return [self initWithPixelBuffer:pixelBuffer
adaptedWidth:CVPixelBufferGetWidth(pixelBuffer)
adaptedHeight:CVPixelBufferGetHeight(pixelBuffer)
cropWidth:CVPixelBufferGetWidth(pixelBuffer)
cropHeight:CVPixelBufferGetHeight(pixelBuffer)
cropX:0
cropY:0];
}
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
adaptedWidth:(int)adaptedWidth
adaptedHeight:(int)adaptedHeight
cropWidth:(int)cropWidth
cropHeight:(int)cropHeight
cropX:(int)cropX
cropY:(int)cropY {
if (self = [super init]) {
_width = adaptedWidth;
_height = adaptedHeight;
_pixelBuffer = pixelBuffer;
_bufferWidth = CVPixelBufferGetWidth(_pixelBuffer);
_bufferHeight = CVPixelBufferGetHeight(_pixelBuffer);
_cropWidth = cropWidth;
_cropHeight = cropHeight;
// Can only crop at even pixels.
_cropX = cropX & ~1;
_cropY = cropY & ~1;
CVBufferRetain(_pixelBuffer);
}
return self;
}
- (void)dealloc {
CVBufferRelease(_pixelBuffer);
}
- (int)width {
return _width;
}
- (int)height {
return _height;
}
- (BOOL)requiresCropping {
return _cropWidth != _bufferWidth || _cropHeight != _bufferHeight;
}
- (BOOL)requiresScalingToWidth:(int)width height:(int)height {
return _cropWidth != width || _cropHeight != height;
}
- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height {
int srcChromaWidth = (_cropWidth + 1) / 2;
int srcChromaHeight = (_cropHeight + 1) / 2;
int dstChromaWidth = (width + 1) / 2;
int dstChromaHeight = (height + 1) / 2;
return srcChromaWidth * srcChromaHeight * 2 + dstChromaWidth * dstChromaHeight * 2;
}
- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t*)tmpBuffer {
// Prepare output pointers.
RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(outputPixelBuffer),
kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
if (cvRet != kCVReturnSuccess) {
LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
return NO;
}
const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
uint8_t* dstY =
reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0));
const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
uint8_t* dstUV =
reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1));
const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
// Prepare source pointers.
const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
RTC_DCHECK(srcPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
srcPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
const uint8_t* srcY =
static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
const uint8_t* srcUV =
static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
// Crop just by modifying pointers.
srcY += srcYStride * _cropY + _cropX;
srcUV += srcUVStride * (_cropY / 2) + _cropX;
webrtc::NV12Scale(tmpBuffer,
srcY,
srcYStride,
srcUV,
srcUVStride,
_cropWidth,
_cropHeight,
dstY,
dstYStride,
dstUV,
dstUVStride,
dstWidth,
dstHeight);
CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
return YES;
}
- (id<RTCI420Buffer>)toI420 {
const OSType pixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
RTC_DCHECK(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
const uint8_t* srcY =
static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
const uint8_t* srcUV =
static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
// Crop just by modifying pointers.
srcY += srcYStride * _cropY + _cropX;
srcUV += srcUVStride * (_cropY / 2) + _cropX;
// TODO(magjed): Use a frame buffer pool.
webrtc::NV12ToI420Scaler nv12ToI420Scaler;
RTCMutableI420Buffer* i420Buffer =
[[RTCMutableI420Buffer alloc] initWithWidth:[self width] height:[self height]];
nv12ToI420Scaler.NV12ToI420Scale(srcY,
srcYStride,
srcUV,
srcUVStride,
_cropWidth,
_cropHeight,
i420Buffer.mutableDataY,
i420Buffer.strideY,
i420Buffer.mutableDataU,
i420Buffer.strideU,
i420Buffer.mutableDataV,
i420Buffer.strideV,
i420Buffer.width,
i420Buffer.height);
CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
return i420Buffer;
}
@end
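
The crop/scale entry points above are meant to be called as a sequence: check requiresScalingToWidth:height:, size a scratch buffer with bufferSizeForCroppingAndScalingToWidth:height:, then run cropAndScaleTo:withTempBuffer:. A hedged sketch of a caller (the NV12 destination-buffer creation is an assumption about the caller's setup, mirroring what the VideoToolbox encoder below does with its member scratch vector):

#import "WebRTC/RTCVideoFrameBuffer.h"

// Returns a newly created NV12 pixel buffer holding the cropped and scaled
// contents of |src|, or NULL on failure. The caller owns the result.
static CVPixelBufferRef CroppedScaledCopy(RTCCVPixelBuffer *src,
                                          int dstWidth, int dstHeight) {
  CVPixelBufferRef dst = NULL;
  if (CVPixelBufferCreate(kCFAllocatorDefault, dstWidth, dstHeight,
                          kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                          NULL, &dst) != kCVReturnSuccess) {
    return NULL;
  }
  NSMutableData *tmp = nil;
  if ([src requiresScalingToWidth:dstWidth height:dstHeight]) {
    // Scratch space for NV12Scale, sized per the chroma-plane formula above.
    tmp = [NSMutableData dataWithLength:
        [src bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight]];
  }
  if (![src cropAndScaleTo:dst withTempBuffer:(uint8_t *)tmp.mutableBytes]) {
    CVBufferRelease(dst);
    return NULL;
  }
  return dst;
}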

File: webrtc/sdk/objc/Framework/Classes/Video/RTCI420Buffer+Private.h (new)

@@ -0,0 +1,24 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "webrtc/api/video/i420_buffer.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCI420Buffer ()
/** Initialize an RTCI420Buffer with its backing I420BufferInterface. */
- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer;
@end
NS_ASSUME_NONNULL_END

File: webrtc/sdk/objc/Framework/Classes/Video/RTCI420Buffer.mm (new)

@@ -0,0 +1,108 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "webrtc/api/video/i420_buffer.h"
@implementation RTCI420Buffer {
@protected
rtc::scoped_refptr<webrtc::I420BufferInterface> _i420Buffer;
}
- (instancetype)initWithWidth:(int)width height:(int)height {
if (self = [super init]) {
_i420Buffer = webrtc::I420Buffer::Create(width, height);
}
return self;
}
- (instancetype)initWithWidth:(int)width
height:(int)height
strideY:(int)strideY
strideU:(int)strideU
strideV:(int)strideV {
if (self = [super init]) {
_i420Buffer = webrtc::I420Buffer::Create(width, height, strideY, strideU, strideV);
}
return self;
}
- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer {
if (self = [super init]) {
_i420Buffer = i420Buffer;
}
return self;
}
- (int)width {
return _i420Buffer->width();
}
- (int)height {
return _i420Buffer->height();
}
- (int)strideY {
return _i420Buffer->StrideY();
}
- (int)strideU {
return _i420Buffer->StrideU();
}
- (int)strideV {
return _i420Buffer->StrideV();
}
- (int)chromaWidth {
return _i420Buffer->ChromaWidth();
}
- (int)chromaHeight {
return _i420Buffer->ChromaHeight();
}
- (const uint8_t *)dataY {
return _i420Buffer->DataY();
}
- (const uint8_t *)dataU {
return _i420Buffer->DataU();
}
- (const uint8_t *)dataV {
return _i420Buffer->DataV();
}
- (id<RTCI420Buffer>)toI420 {
return self;
}
@end
@implementation RTCMutableI420Buffer
- (uint8_t *)mutableDataY {
return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataY();
}
- (uint8_t *)mutableDataU {
return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataU();
}
- (uint8_t *)mutableDataV {
return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataV();
}
@end
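
Because RTCMutableI420Buffer owns its planes, it also gives tests and software pipelines a way to synthesize frames with no CVPixelBuffer at all. A small sketch that fills a solid mid-gray frame (the fill values are arbitrary):

#import "WebRTC/RTCVideoFrameBuffer.h"
#include <string.h>

static id<RTCI420Buffer> GrayI420Buffer(int width, int height) {
  RTCMutableI420Buffer *buffer =
      [[RTCMutableI420Buffer alloc] initWithWidth:width height:height];
  // Planes may be stride-padded, so fill stride * rows, not width * rows.
  memset(buffer.mutableDataY, 0x80, buffer.strideY * buffer.height);
  memset(buffer.mutableDataU, 0x80, buffer.strideU * buffer.chromaHeight);
  memset(buffer.mutableDataV, 0x80, buffer.strideV * buffer.chromaHeight);
  return buffer;
}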

File: webrtc/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.mm

@@ -9,6 +9,7 @@
  */

 #import "RTCI420TextureCache.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"

 #if TARGET_OS_IPHONE
 #import <OpenGLES/ES3/gl.h>
@@ -123,31 +124,32 @@ static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
 - (void)uploadFrameToTextures:(RTCVideoFrame *)frame {
   _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;

-  const int chromaWidth = (frame.width + 1) / 2;
-  const int chromaHeight = (frame.height + 1) / 2;
-  if (frame.strideY != frame.width ||
-      frame.strideU != chromaWidth ||
-      frame.strideV != chromaWidth) {
-    _planeBuffer.resize(frame.width * frame.height);
+  id<RTCI420Buffer> buffer = [frame.buffer toI420];
+
+  const int chromaWidth = buffer.chromaWidth;
+  const int chromaHeight = buffer.chromaHeight;
+  if (buffer.strideY != frame.width || buffer.strideU != chromaWidth ||
+      buffer.strideV != chromaWidth) {
+    _planeBuffer.resize(buffer.width * buffer.height);
   }

-  [self uploadPlane:frame.dataY
+  [self uploadPlane:buffer.dataY
             texture:self.yTexture
-              width:frame.width
-             height:frame.height
-             stride:frame.strideY];
+              width:buffer.width
+             height:buffer.height
+             stride:buffer.strideY];

-  [self uploadPlane:frame.dataU
+  [self uploadPlane:buffer.dataU
             texture:self.uTexture
               width:chromaWidth
              height:chromaHeight
-             stride:frame.strideU];
+             stride:buffer.strideU];

-  [self uploadPlane:frame.dataV
+  [self uploadPlane:buffer.dataV
             texture:self.vTexture
               width:chromaWidth
              height:chromaHeight
-             stride:frame.strideV];
+             stride:buffer.strideV];
 }

 @end

File: RTCNV12TextureCache.m

@@ -11,6 +11,7 @@
 #import "RTCNV12TextureCache.h"

 #import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"

 @implementation RTCNV12TextureCache {
   CVOpenGLESTextureCacheRef _textureCache;
@@ -73,8 +74,10 @@
 }

 - (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame {
-  CVPixelBufferRef pixelBuffer = frame.nativeHandle;
-  NSParameterAssert(pixelBuffer);
+  NSAssert([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]],
+           @"frame must be CVPixelBuffer backed");
+  RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
+  CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
   return [self loadTexture:&_yTextureRef
               pixelBuffer:pixelBuffer
                planeIndex:0

File: webrtc/sdk/objc/Framework/Classes/Video/avfoundationvideocapturer.mm

@@ -15,6 +15,7 @@
 #import "RTCAVFoundationVideoCapturerInternal.h"
 #import "RTCDispatcher+Private.h"
 #import "WebRTC/RTCLogging.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"

 #include "avfoundationformatmapper.h"
@@ -23,7 +24,7 @@
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/thread.h"
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"

 namespace webrtc {
@@ -150,12 +151,15 @@ void AVFoundationVideoCapturer::CaptureSampleBuffer(
     return;
   }

+  RTCCVPixelBuffer* rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:image_buffer
+                                                                      adaptedWidth:adapted_width
+                                                                     adaptedHeight:adapted_height
+                                                                         cropWidth:crop_width
+                                                                        cropHeight:crop_height
+                                                                             cropX:crop_x
+                                                                             cropY:crop_y];
   rtc::scoped_refptr<VideoFrameBuffer> buffer =
-      new rtc::RefCountedObject<CoreVideoFrameBuffer>(
-          image_buffer,
-          adapted_width, adapted_height,
-          crop_width, crop_height,
-          crop_x, crop_y);
+      new rtc::RefCountedObject<ObjCFrameBuffer>(rtcPixelBuffer);

   // Applying rotation is only supported for legacy reasons and performance is
   // not critical here.

File: webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h (new)

@@ -0,0 +1,44 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJC_FRAME_BUFFER_H_
#define WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJC_FRAME_BUFFER_H_
#import <CoreVideo/CoreVideo.h>
#include "webrtc/common_video/include/video_frame_buffer.h"
@protocol RTCVideoFrameBuffer;
namespace webrtc {
class ObjCFrameBuffer : public VideoFrameBuffer {
public:
explicit ObjCFrameBuffer(id<RTCVideoFrameBuffer>);
~ObjCFrameBuffer() override;
Type type() const override;
int width() const override;
int height() const override;
rtc::scoped_refptr<I420BufferInterface> ToI420() override;
id<RTCVideoFrameBuffer> wrapped_frame_buffer() const;
private:
id<RTCVideoFrameBuffer> frame_buffer_;
int width_;
int height_;
};
} // namespace webrtc
#endif // WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJC_FRAME_BUFFER_H_
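
ObjCFrameBuffer is the bridge in both directions: native code wraps an Obj-C buffer so it can flow through the C++ pipeline as a kNative buffer, and unwraps kNative buffers coming back out, exactly as the capturer, renderer-adapter and encoder diffs in this CL do. A condensed Objective-C++ sketch of both directions:

#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
#import "WebRTC/RTCVideoFrameBuffer.h"

// Wrap: id<RTCVideoFrameBuffer> -> webrtc::VideoFrameBuffer (type() == kNative).
rtc::scoped_refptr<webrtc::VideoFrameBuffer> Wrap(id<RTCVideoFrameBuffer> objc_buffer) {
  return new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(objc_buffer);
}

// Unwrap: recover the Obj-C buffer from a native buffer. Non-native buffers
// have no Obj-C wrapper and must go through ToI420() instead.
id<RTCVideoFrameBuffer> Unwrap(
    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
  if (buffer->type() == webrtc::VideoFrameBuffer::Type::kNative) {
    return static_cast<webrtc::ObjCFrameBuffer*>(buffer.get())->wrapped_frame_buffer();
  }
  return nil;
}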

File: webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.mm (new)

@@ -0,0 +1,78 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
namespace webrtc {
namespace {
/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTCI420Buffer */
class ObjCI420FrameBuffer : public I420BufferInterface {
public:
explicit ObjCI420FrameBuffer(id<RTCI420Buffer> frame_buffer)
: frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
~ObjCI420FrameBuffer() override{};
int width() const override { return width_; }
int height() const override { return height_; }
const uint8_t* DataY() const override { return frame_buffer_.dataY; }
const uint8_t* DataU() const override { return frame_buffer_.dataU; }
const uint8_t* DataV() const override { return frame_buffer_.dataV; }
int StrideY() const override { return frame_buffer_.strideY; }
int StrideU() const override { return frame_buffer_.strideU; }
int StrideV() const override { return frame_buffer_.strideV; }
private:
id<RTCI420Buffer> frame_buffer_;
int width_;
int height_;
};
} // namespace
ObjCFrameBuffer::ObjCFrameBuffer(id<RTCVideoFrameBuffer> frame_buffer)
: frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
ObjCFrameBuffer::~ObjCFrameBuffer() {}
VideoFrameBuffer::Type ObjCFrameBuffer::type() const {
return Type::kNative;
}
int ObjCFrameBuffer::width() const {
return width_;
}
int ObjCFrameBuffer::height() const {
return height_;
}
rtc::scoped_refptr<I420BufferInterface> ObjCFrameBuffer::ToI420() {
rtc::scoped_refptr<I420BufferInterface> buffer =
new rtc::RefCountedObject<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
return buffer;
}
id<RTCVideoFrameBuffer> ObjCFrameBuffer::wrapped_frame_buffer() const {
return frame_buffer_;
}
} // namespace webrtc

File: webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.mm

@@ -10,10 +10,11 @@

 #include "webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.h"

-#import "RTCVideoFrame+Private.h"
+#import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"

 #include "webrtc/api/video/i420_buffer.h"
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"

 namespace webrtc {
@@ -43,18 +44,24 @@ void ObjcVideoTrackSource::OnCapturedFrame(RTCVideoFrame* frame) {
   rtc::scoped_refptr<VideoFrameBuffer> buffer;
   if (adapted_width == frame.width && adapted_height == frame.height) {
     // No adaption - optimized path.
-    buffer = frame.videoBuffer;
-  } else if (frame.nativeHandle) {
+    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
+  } else if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
     // Adapted CVPixelBuffer frame.
-    buffer = new rtc::RefCountedObject<CoreVideoFrameBuffer>(
-        static_cast<CVPixelBufferRef>(frame.nativeHandle), adapted_width, adapted_height,
-        crop_width, crop_height, crop_x, crop_y);
+    RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
+    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTCCVPixelBuffer alloc]
+        initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
+               adaptedWidth:adapted_width
+              adaptedHeight:adapted_height
+                  cropWidth:crop_width
+                 cropHeight:crop_height
+                      cropX:crop_x
+                      cropY:crop_y]);
   } else {
     // Adapted I420 frame.
     // TODO(magjed): Optimize this I420 path.
     rtc::scoped_refptr<I420Buffer> i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
-    i420_buffer->CropAndScaleFrom(
-        *frame.videoBuffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
+    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
+    i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
     buffer = i420_buffer;
   }

File: webrtc/sdk/objc/Framework/Classes/VideoToolbox/ (H.264 decoder)

@@ -18,9 +18,11 @@
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/common_video/include/video_frame.h"
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
 #include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"

+#import "WebRTC/RTCVideoFrameBuffer.h"
+
 #if defined(WEBRTC_IOS)
 #import "Common/RTCUIApplicationStatusObserver.h"
 #endif
@@ -64,8 +66,8 @@ void VTDecompressionOutputCallback(void* decoder,
     return;
   }
   // TODO(tkchin): Handle CVO properly.
-  rtc::scoped_refptr<VideoFrameBuffer> buffer =
-      new rtc::RefCountedObject<CoreVideoFrameBuffer>(image_buffer);
+  rtc::scoped_refptr<VideoFrameBuffer> buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(
+      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:image_buffer]);
   VideoFrame decoded_frame(buffer, decode_params->timestamp,
                            CMTimeGetSeconds(timestamp) * kMsPerSec,
                            kVideoRotation_0);

File: webrtc/sdk/objc/Framework/Classes/VideoToolbox/ (H.264 encoder)

@@ -19,11 +19,12 @@
 #import "Common/RTCUIApplicationStatusObserver.h"
 #import "WebRTC/UIDevice+RTCDevice.h"
 #endif
+#import "WebRTC/RTCVideoFrameBuffer.h"
 #include "libyuv/convert_from.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/common_video/h264/profile_level_id.h"
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
 #include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
 #include "webrtc/system_wrappers/include/clock.h"
@@ -411,16 +412,22 @@ int H264VideoToolboxEncoder::Encode(
   }
 #endif
-  CVPixelBufferRef pixel_buffer;
+  CVPixelBufferRef pixel_buffer = nullptr;
   if (frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative) {
-    rtc::scoped_refptr<CoreVideoFrameBuffer> core_video_frame_buffer(
-        static_cast<CoreVideoFrameBuffer*>(frame.video_frame_buffer().get()));
-    if (!core_video_frame_buffer->RequiresCropping()) {
-      pixel_buffer = core_video_frame_buffer->pixel_buffer();
+    // Native frame.
+    rtc::scoped_refptr<ObjCFrameBuffer> objc_frame_buffer(
+        static_cast<ObjCFrameBuffer*>(frame.video_frame_buffer().get()));
+    id<RTCVideoFrameBuffer> wrapped_frame_buffer =
+        (id<RTCVideoFrameBuffer>)objc_frame_buffer->wrapped_frame_buffer();
+    if ([wrapped_frame_buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+      RTCCVPixelBuffer* rtc_pixel_buffer = (RTCCVPixelBuffer*)wrapped_frame_buffer;
+      if (![rtc_pixel_buffer requiresCropping]) {
         // This pixel buffer might have a higher resolution than what the
         // compression session is configured to. The compression session can
         // handle that and will output encoded frames in the configured
         // resolution regardless of the input pixel buffer resolution.
+        pixel_buffer = rtc_pixel_buffer.pixelBuffer;
         CVBufferRetain(pixel_buffer);
       } else {
         // Cropping required, we need to crop and scale to a new pixel buffer.
@@ -428,12 +435,26 @@ int H264VideoToolboxEncoder::Encode(
         if (!pixel_buffer) {
           return WEBRTC_VIDEO_CODEC_ERROR;
         }
-        if (!core_video_frame_buffer->CropAndScaleTo(&nv12_scale_buffer_,
-                                                     pixel_buffer)) {
+        int dst_width = CVPixelBufferGetWidth(pixel_buffer);
+        int dst_height = CVPixelBufferGetHeight(pixel_buffer);
+        if ([rtc_pixel_buffer requiresScalingToWidth:dst_width height:dst_height]) {
+          int size =
+              [rtc_pixel_buffer bufferSizeForCroppingAndScalingToWidth:dst_width height:dst_height];
+          nv12_scale_buffer_.resize(size);
+        } else {
+          nv12_scale_buffer_.clear();
+        }
+        nv12_scale_buffer_.shrink_to_fit();
+        if (![rtc_pixel_buffer cropAndScaleTo:pixel_buffer
+                               withTempBuffer:nv12_scale_buffer_.data()]) {
           return WEBRTC_VIDEO_CODEC_ERROR;
         }
       }
-  } else {
+    }
+  }
+
+  if (!pixel_buffer) {
+    // We did not have a native frame, or the ObjCVideoFrame wrapped a non-native frame
     pixel_buffer = internal::CreatePixelBuffer(pixel_buffer_pool);
     if (!pixel_buffer) {
       return WEBRTC_VIDEO_CODEC_ERROR;

File: webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h

@@ -22,6 +22,8 @@ typedef NS_ENUM(NSInteger, RTCVideoRotation) {
   RTCVideoRotation_270 = 270,
 };

+@protocol RTCVideoFrameBuffer;
+
 // RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame.
 RTC_EXPORT
 @interface RTCVideoFrame : NSObject
@@ -36,27 +38,35 @@ RTC_EXPORT
  * is null. It is always possible to get such a frame by calling
  * newI420VideoFrame.
  */
-@property(nonatomic, readonly, nullable) const uint8_t *dataY;
-@property(nonatomic, readonly, nullable) const uint8_t *dataU;
-@property(nonatomic, readonly, nullable) const uint8_t *dataV;
-@property(nonatomic, readonly) int strideY;
-@property(nonatomic, readonly) int strideU;
-@property(nonatomic, readonly) int strideV;
+@property(nonatomic, readonly, nullable)
+    const uint8_t *dataY DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
+@property(nonatomic, readonly, nullable)
+    const uint8_t *dataU DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
+@property(nonatomic, readonly, nullable)
+    const uint8_t *dataV DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
+@property(nonatomic, readonly) int strideY DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
+@property(nonatomic, readonly) int strideU DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
+@property(nonatomic, readonly) int strideV DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");

 /** Timestamp in nanoseconds. */
 @property(nonatomic, readonly) int64_t timeStampNs;

 /** The native handle should be a pixel buffer on iOS. */
-@property(nonatomic, readonly) CVPixelBufferRef nativeHandle;
+@property(nonatomic, readonly)
+    CVPixelBufferRef nativeHandle DEPRECATED_MSG_ATTRIBUTE("use buffer instead");
+
+@property(nonatomic, readonly) id<RTCVideoFrameBuffer> buffer;

 - (instancetype)init NS_UNAVAILABLE;
 - (instancetype)new NS_UNAVAILABLE;

 /** Initialize an RTCVideoFrame from a pixel buffer, rotation, and timestamp.
+ *  Deprecated - initialize with a RTCCVPixelBuffer instead
  */
 - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
                            rotation:(RTCVideoRotation)rotation
-                        timeStampNs:(int64_t)timeStampNs;
+                        timeStampNs:(int64_t)timeStampNs
+    DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead");

 /** Initialize an RTCVideoFrame from a pixel buffer combined with cropping and
  * scaling. Cropping will be applied first on the pixel buffer, followed by
@@ -70,6 +80,13 @@ RTC_EXPORT
                               cropX:(int)cropX
                               cropY:(int)cropY
                            rotation:(RTCVideoRotation)rotation
-                        timeStampNs:(int64_t)timeStampNs;
+                        timeStampNs:(int64_t)timeStampNs
+    DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead");
+
+/** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp.
+ */
+- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)frameBuffer
+                      rotation:(RTCVideoRotation)rotation
+                   timeStampNs:(int64_t)timeStampNs;

 /** Return a frame that is guaranteed to be I420, i.e. it is possible to access

File: webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h (new)

@@ -0,0 +1,99 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <AVFoundation/AVFoundation.h>
#import <WebRTC/RTCMacros.h>
NS_ASSUME_NONNULL_BEGIN
@protocol RTCI420Buffer;
// RTCVideoFrameBuffer is an ObjectiveC version of webrtc::VideoFrameBuffer.
RTC_EXPORT
@protocol RTCVideoFrameBuffer <NSObject>
@property(nonatomic, readonly) int width;
@property(nonatomic, readonly) int height;
- (id<RTCI420Buffer>)toI420;
@end
/** Protocol for RTCVideoFrameBuffers containing YUV planar data. */
@protocol RTCYUVPlanarBuffer <RTCVideoFrameBuffer>
@property(nonatomic, readonly) int chromaWidth;
@property(nonatomic, readonly) int chromaHeight;
@property(nonatomic, readonly) const uint8_t *dataY;
@property(nonatomic, readonly) const uint8_t *dataU;
@property(nonatomic, readonly) const uint8_t *dataV;
@property(nonatomic, readonly) int strideY;
@property(nonatomic, readonly) int strideU;
@property(nonatomic, readonly) int strideV;
- (instancetype)initWithWidth:(int)width height:(int)height;
- (instancetype)initWithWidth:(int)width
height:(int)height
strideY:(int)strideY
strideU:(int)strideU
strideV:(int)strideV;
@end
/** Extension of the YUV planar data buffer with mutable data access */
@protocol RTCMutableYUVPlanarBuffer <RTCYUVPlanarBuffer>
@property(nonatomic, readonly) uint8_t *mutableDataY;
@property(nonatomic, readonly) uint8_t *mutableDataU;
@property(nonatomic, readonly) uint8_t *mutableDataV;
@end
/** Protocol for RTCYUVPlanarBuffers containing I420 data */
@protocol RTCI420Buffer <RTCYUVPlanarBuffer>
@end
/** Extension of the I420 buffer with mutable data access */
@protocol RTCMutableI420Buffer <RTCI420Buffer, RTCMutableYUVPlanarBuffer>
@end
/** RTCVideoFrameBuffer containing a CVPixelBufferRef */
@interface RTCCVPixelBuffer : NSObject <RTCVideoFrameBuffer>
@property(nonatomic, readonly) CVPixelBufferRef pixelBuffer;
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer;
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
adaptedWidth:(int)adaptedWidth
adaptedHeight:(int)adaptedHeight
cropWidth:(int)cropWidth
cropHeight:(int)cropHeight
cropX:(int)cropX
cropY:(int)cropY;
- (BOOL)requiresCropping;
- (BOOL)requiresScalingToWidth:(int)width height:(int)height;
- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height;
/** The minimum size of the |tmpBuffer| must be the number of bytes returned from the
* bufferSizeForCroppingAndScalingToWidth:height: method.
*/
- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t *)tmpBuffer;
@end
/** RTCI420Buffer implements the RTCI420Buffer protocol */
@interface RTCI420Buffer : NSObject <RTCI420Buffer>
@end
/** Mutable version of RTCI420Buffer */
@interface RTCMutableI420Buffer : RTCI420Buffer <RTCMutableI420Buffer>
@end
NS_ASSUME_NONNULL_END
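
Since RTCVideoFrameBuffer is a protocol, applications can now feed the SDK buffer types it has never heard of, which is the point of this CL. A hypothetical RGBA-backed buffer sketch, with the actual color conversion elided (libyuv is one option for implementing it):

#import <WebRTC/RTCVideoFrameBuffer.h>

/** Hypothetical app-side buffer wrapping tightly packed RGBA pixels. */
@interface MyRGBABuffer : NSObject <RTCVideoFrameBuffer>
- (instancetype)initWithData:(NSData *)rgba width:(int)width height:(int)height;
@end

@implementation MyRGBABuffer {
  NSData *_rgba;
  int _width;
  int _height;
}

- (instancetype)initWithData:(NSData *)rgba width:(int)width height:(int)height {
  if (self = [super init]) {
    _rgba = rgba;
    _width = width;
    _height = height;
  }
  return self;
}

- (int)width { return _width; }
- (int)height { return _height; }

- (id<RTCI420Buffer>)toI420 {
  RTCMutableI420Buffer *i420 =
      [[RTCMutableI420Buffer alloc] initWithWidth:_width height:_height];
  // Convert _rgba into i420.mutableDataY/U/V here (e.g. libyuv's ABGRToI420);
  // elided to keep the sketch short.
  return i420;
}

@end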

File: RTCMTLVideoViewTests.mm (RTCMTLVideoView unit tests)

@@ -15,6 +15,7 @@

 #include <Metal/RTCMTLNV12Renderer.h>
 #include <WebRTC/RTCMTLVideoView.h>
+#include <WebRTC/RTCVideoFrameBuffer.h>

 // Extension of RTCMTLVideoView for testing purposes.
 @interface RTCMTLVideoView (Testing)
@@ -59,12 +60,14 @@
   self.frameMock = nil;
 }

-- (id)frameMockWithNativeHandle:(BOOL)hasNativeHandle {
+- (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer {
   id frameMock = OCMClassMock([RTCVideoFrame class]);
-  if (hasNativeHandle) {
-    OCMStub([frameMock nativeHandle]).andReturn((CVPixelBufferRef)[OCMArg anyPointer]);
+  if (hasCVPixelBuffer) {
+    OCMStub([frameMock buffer])
+        .andReturn(
+            [[RTCCVPixelBuffer alloc] initWithPixelBuffer:(CVPixelBufferRef)[OCMArg anyPointer]]);
   } else {
-    OCMStub([frameMock nativeHandle]).andReturn((CVPixelBufferRef) nullptr);
+    OCMStub([frameMock buffer]).andReturn([[RTCI420Buffer alloc] initWithWidth:200 height:200]);
   }
   return frameMock;
 }
@@ -99,7 +102,7 @@
   RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init];
   self.frameMock = OCMClassMock([RTCVideoFrame class]);

-  [[self.frameMock reject] nativeHandle];
+  [[self.frameMock reject] buffer];
   [[self.classMock reject] createNV12Renderer];
   [[self.classMock reject] createI420Renderer];
@@ -116,7 +119,7 @@
   // given
   OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
   self.rendererI420Mock = [self rendererMockWithSuccessfulSetup:YES];
-  self.frameMock = [self frameMockWithNativeHandle:NO];
+  self.frameMock = [self frameMockWithCVPixelBuffer:NO];

   OCMExpect([self.rendererI420Mock drawFrame:self.frameMock]);
   OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock);
@@ -137,7 +140,7 @@
   // given
   OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
   self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
-  self.frameMock = [self frameMockWithNativeHandle:YES];
+  self.frameMock = [self frameMockWithCVPixelBuffer:YES];

   OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
   OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);