Revert "Support more formats in RTCVideoFrame"

This reverts commit bd2220a9c496ef2e8567b68d4be9435a110bdc34.

Reason for revert: Broke external clients

Original change's description:
> Support more formats in RTCVideoFrame
> 
> Implement Obj-C version of webrtc::VideoFrameBuffer and use that in
> RTCVideoFrame.
> 
> Bug: webrtc:7785
> Change-Id: I49f42bcf451dd6769b3a79a65fe7b400dce22677
> Reviewed-on: https://chromium-review.googlesource.com/536773
> Commit-Queue: Anders Carlsson <andersc@webrtc.org>
> Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#18691}

TBR=magjed@webrtc.org,andersc@webrtc.org

Change-Id: Id765dd9543ed0613a6b2de108b268c3501025fcd
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:7785
Reviewed-on: https://chromium-review.googlesource.com/542837
Reviewed-by: Anders Carlsson <andersc@webrtc.org>
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18697}
Author: Anders Carlsson
Date: 2017-06-21 08:41:26 +00:00
Committed by: Commit Bot
Parent: 0f15f926e3
Commit: 0789dab2cb

24 changed files with 169 additions and 783 deletions
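
For reference, the API difference the diffs below revolve around: before this revert, an RTCVideoFrame was built from an id<RTCVideoFrameBuffer> (typically an RTCCVPixelBuffer) via -initWithBuffer:; the revert restores the path where callers hand the CVPixelBufferRef straight to -initWithPixelBuffer:. A minimal Objective-C sketch of the two construction paths, based only on the calls visible below; the helper names, pixelBuffer, timeStampNs and the RTCVideoRotation_0 value are illustrative, and the two helpers target different revisions of the headers, so they would not compile against the same tree:

#import <CoreVideo/CoreVideo.h>
#import "WebRTC/RTCVideoFrame.h"
// Only present before the revert; declares id<RTCVideoFrameBuffer> and RTCCVPixelBuffer.
#import "WebRTC/RTCVideoFrameBuffer.h"

// Path restored by this revert: the frame wraps the CVPixelBufferRef directly
// (internally backed by webrtc::CoreVideoFrameBuffer, see RTCVideoFrame.mm below).
static RTCVideoFrame *FrameFromPixelBuffer(CVPixelBufferRef pixelBuffer,
                                           int64_t timeStampNs) {
  return [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
                                           rotation:RTCVideoRotation_0
                                        timeStampNs:timeStampNs];
}

// Path removed by this revert: the pixel buffer is first wrapped in an
// id<RTCVideoFrameBuffer> (RTCCVPixelBuffer) and handed to -initWithBuffer:.
static RTCVideoFrame *FrameFromFrameBuffer(CVPixelBufferRef pixelBuffer,
                                           int64_t timeStampNs) {
  RTCCVPixelBuffer *rtcPixelBuffer =
      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
  return [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                      rotation:RTCVideoRotation_0
                                   timeStampNs:timeStampNs];
}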


@@ -12,7 +12,6 @@
#import "WebRTC/RTCCameraVideoCapturer.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
@@ -192,12 +191,11 @@ static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
return;
}
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
kNanosecondsPerSecond;
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}


@@ -11,7 +11,6 @@
#import "RTCFileVideoCapturer.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
@implementation RTCFileVideoCapturer {
AVAssetReader *_reader;
@@ -134,11 +133,10 @@
return;
}
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
NSTimeInterval timeStampSeconds = CACurrentMediaTime();
int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
RTCVideoFrame *videoFrame =
[[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs];
[[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer rotation:0 timeStampNs:timeStampNs];
CFRelease(sampleBuffer);
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{


@@ -0,0 +1,29 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrame.h"
#include "webrtc/api/video/video_frame_buffer.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCVideoFrame ()
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer;
- (instancetype)initWithVideoBuffer:
(rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs
NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END


@@ -8,22 +8,22 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#import "webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h"
#import "webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h"
#import "RTCVideoFrame+Private.h"
#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
@implementation RTCVideoFrame {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;
RTCVideoRotation _rotation;
int64_t _timeStampNs;
}
@synthesize buffer = _buffer;
- (int)width {
return _buffer.width;
return _videoBuffer->width();
}
- (int)height {
return _buffer.height;
return _videoBuffer->height();
}
- (RTCVideoRotation)rotation {
@@ -31,51 +31,27 @@
}
- (const uint8_t *)dataY {
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).dataY;
} else {
return nullptr;
}
return _videoBuffer->GetI420()->DataY();
}
- (const uint8_t *)dataU {
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).dataU;
} else {
return nullptr;
}
return _videoBuffer->GetI420()->DataU();
}
- (const uint8_t *)dataV {
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).dataV;
} else {
return nullptr;
}
return _videoBuffer->GetI420()->DataV();
}
- (int)strideY {
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).strideY;
} else {
return 0;
}
return _videoBuffer->GetI420()->StrideY();
}
- (int)strideU {
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).strideU;
} else {
return 0;
}
return _videoBuffer->GetI420()->StrideU();
}
- (int)strideV {
if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
return ((id<RTCI420Buffer>)_buffer).strideV;
} else {
return 0;
}
return _videoBuffer->GetI420()->StrideV();
}
- (int64_t)timeStampNs {
@@ -83,25 +59,26 @@
}
- (CVPixelBufferRef)nativeHandle {
if ([_buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
return ((RTCCVPixelBuffer *)_buffer).pixelBuffer;
} else {
return nullptr;
}
return (_videoBuffer->type() == webrtc::VideoFrameBuffer::Type::kNative) ?
static_cast<webrtc::CoreVideoFrameBuffer *>(_videoBuffer.get())->pixel_buffer() :
nil;
}
- (RTCVideoFrame *)newI420VideoFrame {
return [[RTCVideoFrame alloc] initWithBuffer:[_buffer toI420]
rotation:_rotation
timeStampNs:_timeStampNs];
return [[RTCVideoFrame alloc]
initWithVideoBuffer:_videoBuffer->ToI420()
rotation:_rotation
timeStampNs:_timeStampNs];
}
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
return [self initWithBuffer:[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]
rotation:rotation
timeStampNs:timeStampNs];
rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer(
new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixelBuffer));
return [self initWithVideoBuffer:videoBuffer
rotation:rotation
timeStampNs:timeStampNs];
}
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
@@ -113,26 +90,33 @@
cropY:(int)cropY
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer
adaptedWidth:scaledWidth
adaptedHeight:scaledHeight
cropWidth:cropWidth
cropHeight:cropHeight
cropX:cropX
cropY:cropY];
return [self initWithBuffer:rtcPixelBuffer rotation:rotation timeStampNs:timeStampNs];
rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer(
new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(
pixelBuffer,
scaledWidth, scaledHeight,
cropWidth, cropHeight,
cropX, cropY));
return [self initWithVideoBuffer:videoBuffer
rotation:rotation
timeStampNs:timeStampNs];
}
- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)buffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
#pragma mark - Private
- (instancetype)initWithVideoBuffer:
(rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
if (self = [super init]) {
_buffer = buffer;
_videoBuffer = videoBuffer;
_rotation = rotation;
_timeStampNs = timeStampNs;
}
return self;
}
- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer {
return _videoBuffer;
}
@end


@@ -8,11 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCI420Buffer+Private.h"
#import "RTCVideoRendererAdapter+Private.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#import "objc_frame_buffer.h"
#import "RTCVideoFrame+Private.h"
#include <memory>
@@ -27,20 +25,12 @@ class VideoRendererAdapter
}
void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer = nativeVideoFrame.video_frame_buffer();
id<RTCVideoFrameBuffer> rtc_frame_buffer;
if (video_frame_buffer->type() == VideoFrameBuffer::Type::kNative) {
rtc::scoped_refptr<ObjCFrameBuffer> objc_frame_buffer(
static_cast<ObjCFrameBuffer*>(video_frame_buffer.get()));
rtc_frame_buffer = (id<RTCVideoFrameBuffer>)objc_frame_buffer->wrapped_frame_buffer();
} else {
rtc_frame_buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:video_frame_buffer->ToI420()];
}
RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
initWithBuffer:rtc_frame_buffer
rotation:static_cast<RTCVideoRotation>(nativeVideoFrame.rotation())
timeStampNs:nativeVideoFrame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
initWithVideoBuffer:nativeVideoFrame.video_frame_buffer()
rotation:static_cast<RTCVideoRotation>(
nativeVideoFrame.rotation())
timeStampNs:nativeVideoFrame.timestamp_us() *
rtc::kNumNanosecsPerMicrosec];
CGSize current_size = (videoFrame.rotation % 180 == 0)
? CGSizeMake(videoFrame.width, videoFrame.height)
: CGSizeMake(videoFrame.height, videoFrame.width);