Reland "Add unit tests for RTCCVPixelBuffer and ObjCVideoTrackSource."
This is a reland of 4ea50c2b421ae3e40d1d02b8eb8c5802288b181e

Original change's description:
> Add unit tests for RTCCVPixelBuffer and ObjCVideoTrackSource.
>
> This CL also fixes a couple of bugs found in the toI420 method for
> RTCCVPixelBuffers backed by RGB CVPixelBuffers.
>
> Bug: webrtc:9007
> Change-Id: I19ab8177f4b124a503cfda9f0166bd960f668982
> Reviewed-on: https://webrtc-review.googlesource.com/64940
> Commit-Queue: Anders Carlsson <andersc@webrtc.org>
> Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#22656}

Bug: webrtc:9007
Change-Id: I2a787c64f8d23ffc4ef2419fc258d965f8a9480b
Reviewed-on: https://webrtc-review.googlesource.com/66341
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22706}
Committed by: Commit Bot
Parent: 74395345e8
Commit: fe9d8178df

sdk/BUILD.gn | 17
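The toI420 fix called out in the commit message only affects RTCCVPixelBuffers backed by RGB (32BGRA/32ARGB) CVPixelBuffers. As a rough, condensed sketch of the round-trip check the new tests add for that path (the full version is in RTCCVPixelBuffer_xctest.mm further down; CreateI420Gradient and CopyI420BufferToCVPixelBuffer come from the new frame_buffer_helpers files):

  // Illustrative sketch only, not part of the CL: write a known gradient into a
  // BGRA CVPixelBuffer, convert back with toI420, and require a sane PSNR.
  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(360, 640);

  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(NULL, 360, 640, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef);
  CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  RTCI420Buffer *fromCVPixelBuffer = [buffer toI420];

  // RGB <-> I420 conversion is lossy, so the tests only require roughly 19 dB here.
  double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]);
  XCTAssertGreaterThanOrEqual(psnr, 19.0);

  CVBufferRelease(pixelBufferRef);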
@@ -669,22 +669,33 @@ if (is_ios || is_mac) {
       ]

       sources = [
+        "objc/Framework/UnitTests/ObjCVideoTrackSource_xctest.mm",
+        "objc/Framework/UnitTests/RTCCVPixelBuffer_xctest.mm",
         "objc/Framework/UnitTests/RTCCallbackLogger_xctest.m",
         "objc/Framework/UnitTests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m",
         "objc/Framework/UnitTests/RTCFileVideoCapturer_xctest.mm",
+        "objc/Framework/UnitTests/frame_buffer_helpers.h",
+        "objc/Framework/UnitTests/frame_buffer_helpers.mm",
       ]

       deps = [
         ":common_objc",
         ":framework_objc",
+        ":native_api",
+        ":native_video",
         ":videocapture_objc",
+        ":videoframebuffer_objc",
         ":videosource_objc",
         ":videotoolbox_objc",
         "../../system_wrappers:system_wrappers_default",
+        "../api:video_frame_api_i420",
+        "../common_video:common_video",
         "../media:rtc_media_base",
+        "../media:rtc_media_tests_utils",
         "../modules:module_api",
         "../rtc_base:rtc_base",
         "../rtc_base:rtc_base_tests_utils",
+        "//third_party/libyuv",
       ]

       if (rtc_use_metal_rendering) {
@@ -698,6 +709,12 @@ if (is_ios || is_mac) {
       ]

       include_dirs += [ "$root_out_dir/WebRTC.framework/Headers/" ]

+      if (!build_with_chromium && is_clang) {
+        # Suppress warnings from the Chromium Clang plugin
+        # (bugs.webrtc.org/163).
+        suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
+      }
     }

     bundle_data("sdk_unittests_bundle_data") {
@@ -28,6 +28,8 @@
 @synthesize pixelBuffer = _pixelBuffer;
 @synthesize cropX = _cropX;
 @synthesize cropY = _cropY;
+@synthesize cropWidth = _cropWidth;
+@synthesize cropHeight = _cropHeight;

 + (NSSet<NSNumber*>*)supportedPixelFormats {
   return [NSSet setWithObjects:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
@@ -112,16 +114,29 @@
   return 0;
 }

-- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t*)tmpBuffer {
+- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
+        withTempBuffer:(nullable uint8_t*)tmpBuffer {
   const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
+  const OSType dstPixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer);
+
   switch (srcPixelFormat) {
     case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
     case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
+      size_t dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
+      size_t dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
+      if (dstWidth > 0 && dstHeight > 0) {
+        RTC_DCHECK(dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
+                   dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
+        if ([self requiresScalingToWidth:dstWidth height:dstHeight]) {
+          RTC_DCHECK(tmpBuffer);
+        }
       [self cropAndScaleNV12To:outputPixelBuffer withTempBuffer:tmpBuffer];
+      }
       break;
     }
     case kCVPixelFormatType_32BGRA:
     case kCVPixelFormatType_32ARGB: {
+      RTC_DCHECK(srcPixelFormat == dstPixelFormat);
       [self cropAndScaleARGBTo:outputPixelBuffer];
       break;
     }
@@ -143,10 +158,10 @@
     case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
     case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
       const uint8_t* srcY =
-          static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
+          static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
       const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
       const uint8_t* srcUV =
-          static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
+          static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
       const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);

       // Crop just by modifying pointers.
@@ -173,32 +188,52 @@
     }
     case kCVPixelFormatType_32BGRA:
     case kCVPixelFormatType_32ARGB: {
-      const uint8_t* src =
-          static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
-
-      uint32 libyuvPixelFormat = 0;
-      if (pixelFormat == kCVPixelFormatType_32BGRA) {
-        libyuvPixelFormat = libyuv::FOURCC_ARGB;
-      } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
-        libyuvPixelFormat = libyuv::FOURCC_ABGR;
-      }
-
-      libyuv::ConvertToI420(src,
-                            0,
-                            i420Buffer.mutableDataY,
-                            i420Buffer.strideY,
-                            i420Buffer.mutableDataU,
-                            i420Buffer.strideU,
-                            i420Buffer.mutableDataV,
-                            i420Buffer.strideV,
-                            _cropX,
-                            _cropY,
-                            _cropWidth,
-                            _cropHeight,
-                            i420Buffer.width,
-                            i420Buffer.height,
-                            libyuv::kRotate0,
-                            libyuvPixelFormat);
+      CVPixelBufferRef scaledPixelBuffer = NULL;
+      CVPixelBufferRef sourcePixelBuffer = NULL;
+      if ([self requiresCropping] ||
+          [self requiresScalingToWidth:i420Buffer.width height:i420Buffer.height]) {
+        CVPixelBufferCreate(
+            NULL, i420Buffer.width, i420Buffer.height, pixelFormat, NULL, &scaledPixelBuffer);
+        [self cropAndScaleTo:scaledPixelBuffer withTempBuffer:NULL];
+
+        CVPixelBufferLockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
+        sourcePixelBuffer = scaledPixelBuffer;
+      } else {
+        sourcePixelBuffer = _pixelBuffer;
+      }
+      const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(sourcePixelBuffer));
+      const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(sourcePixelBuffer);
+
+      if (pixelFormat == kCVPixelFormatType_32BGRA) {
+        // Corresponds to libyuv::FOURCC_ARGB
+        libyuv::ARGBToI420(src,
+                           bytesPerRow,
+                           i420Buffer.mutableDataY,
+                           i420Buffer.strideY,
+                           i420Buffer.mutableDataU,
+                           i420Buffer.strideU,
+                           i420Buffer.mutableDataV,
+                           i420Buffer.strideV,
+                           i420Buffer.width,
+                           i420Buffer.height);
+      } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
+        // Corresponds to libyuv::FOURCC_BGRA
+        libyuv::BGRAToI420(src,
+                           bytesPerRow,
+                           i420Buffer.mutableDataY,
+                           i420Buffer.strideY,
+                           i420Buffer.mutableDataU,
+                           i420Buffer.strideU,
+                           i420Buffer.mutableDataV,
+                           i420Buffer.strideV,
+                           i420Buffer.width,
+                           i420Buffer.height);
+      }
+
+      if (scaledPixelBuffer) {
+        CVPixelBufferUnlockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
+        CVBufferRelease(scaledPixelBuffer);
+      }
       break;
     }
     default: { RTC_NOTREACHED() << "Unsupported pixel format."; }
@@ -226,11 +261,9 @@

   // Prepare source pointers.
   CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
-  const uint8_t* srcY =
-      static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
+  const uint8_t* srcY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
   const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
-  const uint8_t* srcUV =
-      static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
+  const uint8_t* srcUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
   const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);

   // Crop just by modifying pointers.
@@ -264,13 +297,12 @@
   const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
   const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);

-  uint8_t* dst =
-      reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0));
-  const int dstStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
+  uint8_t* dst = reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddress(outputPixelBuffer));
+  const int dstStride = CVPixelBufferGetBytesPerRow(outputPixelBuffer);

   // Prepare source pointers.
   CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
-  const uint8_t* src = static_cast<const uint8_t*>(CVPixelBufferGetBaseAddress(_pixelBuffer));
+  const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(_pixelBuffer));
   const int srcStride = CVPixelBufferGetBytesPerRow(_pixelBuffer);

   // Crop just by modifying pointers.
@@ -89,6 +89,10 @@
   return self;
 }

+- (rtc::scoped_refptr<webrtc::I420BufferInterface>)nativeI420Buffer {
+  return _i420Buffer;
+}
+
 @end

 @implementation RTCMutableI420Buffer
@@ -71,6 +71,8 @@ RTC_EXPORT
 @property(nonatomic, readonly) CVPixelBufferRef pixelBuffer;
 @property(nonatomic, readonly) int cropX;
 @property(nonatomic, readonly) int cropY;
+@property(nonatomic, readonly) int cropWidth;
+@property(nonatomic, readonly) int cropHeight;

 + (NSSet<NSNumber *> *)supportedPixelFormats;

@@ -86,10 +88,13 @@ RTC_EXPORT
 - (BOOL)requiresCropping;
 - (BOOL)requiresScalingToWidth:(int)width height:(int)height;
 - (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height;
+
 /** The minimum size of the |tmpBuffer| must be the number of bytes returned from the
  * bufferSizeForCroppingAndScalingToWidth:height: method.
+ * If that size is 0, the |tmpBuffer| may be nil.
  */
-- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t *)tmpBuffer;
+- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
+        withTempBuffer:(nullable uint8_t *)tmpBuffer;

 @end
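The header hunk above also documents that |tmpBuffer| may now be nil whenever bufferSizeForCroppingAndScalingToWidth:height: returns 0. A condensed sketch of the intended calling pattern (the full version lives in the NV12 crop-and-scale test below):

  // Illustrative sketch only: size the temp buffer from the API, and pass NULL
  // when no intermediate buffer is needed (the RGB paths report 0, as
  // testBufferSize_RGB checks).
  std::vector<uint8_t> frameScaleBuffer;
  if ([buffer requiresScalingToWidth:360 height:640]) {
    frameScaleBuffer.resize([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640]);
  }
  [buffer cropAndScaleTo:outputPixelBufferRef
          withTempBuffer:frameScaleBuffer.empty() ? NULL : frameScaleBuffer.data()];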
sdk/objc/Framework/UnitTests/ObjCVideoTrackSource_xctest.mm | 364 (new file)

/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import <Foundation/Foundation.h>
#import <XCTest/XCTest.h>

#include "sdk/objc/Framework/Native/src/objc_video_track_source.h"

#import "Video/RTCI420Buffer+Private.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/fakevideorenderer.h"
#include "rtc_base/refcountedobject.h"
#include "rtc_base/scoped_ref_ptr.h"
#include "sdk/objc/Framework/Native/api/video_frame.h"
#import "sdk/objc/Framework/UnitTests/frame_buffer_helpers.h"

typedef void (^VideoSinkCallback)(RTCVideoFrame *);

namespace {

class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  ObjCCallbackVideoSink(VideoSinkCallback callback) : callback_(callback) {}

  virtual void OnFrame(const webrtc::VideoFrame &frame) {
    callback_(NativeToObjCVideoFrame(frame));
  }

 private:
  VideoSinkCallback callback_;
};

}  // namespace

@interface ObjCVideoTrackSourceTests : XCTestCase
@end

@implementation ObjCVideoTrackSourceTests {
  rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> _video_source;
}

- (void)setUp {
  _video_source = new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>();
}

- (void)tearDown {
  _video_source = NULL;
}

- (void)testOnCapturedFrameAdaptsFrame {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];

  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  XCTAssertEqual(video_renderer->num_rendered_frames(), 1);
  XCTAssertEqual(video_renderer->width(), 360);
  XCTAssertEqual(video_renderer->height(), 640);

  CVBufferRelease(pixelBufferRef);
}

- (void)testOnCapturedFrameWithoutAdaptation {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(frame.width, outputFrame.width);
    XCTAssertEqual(frame.height, outputFrame.height);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(buffer.cropX, outputBuffer.cropX);
    XCTAssertEqual(buffer.cropY, outputBuffer.cropY);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

- (void)testOnCapturedFrameCVPixelBufferNeedsAdaptation {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 360);
    XCTAssertEqual(outputFrame.height, 640);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(outputBuffer.cropX, 0);
    XCTAssertEqual(outputBuffer.cropY, 0);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

- (void)testOnCapturedFrameCVPixelBufferNeedsCropping {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 360);
    XCTAssertEqual(outputFrame.height, 640);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(outputBuffer.cropX, 10);
    XCTAssertEqual(outputBuffer.cropY, 0);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

- (void)testOnCapturedFramePreAdaptedCVPixelBufferNeedsAdaptation {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
                                                              adaptedWidth:700
                                                             adaptedHeight:700
                                                                 cropWidth:720
                                                                cropHeight:1280
                                                                     cropX:0
                                                                     cropY:0];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 297);
    XCTAssertEqual(outputFrame.height, 525);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(outputBuffer.cropX, 152);
    XCTAssertEqual(outputBuffer.cropY, 0);
    XCTAssertEqual(outputBuffer.cropWidth, 396);
    XCTAssertEqual(outputBuffer.cropHeight, 700);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

- (void)testOnCapturedFramePreCroppedCVPixelBufferNeedsCropping {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
                                                              adaptedWidth:370
                                                             adaptedHeight:640
                                                                 cropWidth:370
                                                                cropHeight:640
                                                                     cropX:10
                                                                     cropY:0];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 360);
    XCTAssertEqual(outputFrame.height, 640);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(outputBuffer.cropX, 14);
    XCTAssertEqual(outputBuffer.cropY, 0);
    XCTAssertEqual(outputBuffer.cropWidth, 360);
    XCTAssertEqual(outputBuffer.cropHeight, 640);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

- (void)testOnCapturedFrameSmallerPreCroppedCVPixelBufferNeedsCropping {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
                                                              adaptedWidth:300
                                                             adaptedHeight:640
                                                                 cropWidth:300
                                                                cropHeight:640
                                                                     cropX:40
                                                                     cropY:0];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 300);
    XCTAssertEqual(outputFrame.height, 533);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(outputBuffer.cropX, 40);
    XCTAssertEqual(outputBuffer.cropY, 52);
    XCTAssertEqual(outputBuffer.cropWidth, 300);
    XCTAssertEqual(outputBuffer.cropHeight, 533);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

- (void)testOnCapturedFrameI420BufferNeedsAdaptation {
  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
  RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 360);
    XCTAssertEqual(outputFrame.height, 640);

    RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer;

    double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
    XCTAssertEqual(psnr, webrtc::kPerfectPSNR);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
}

- (void)testOnCapturedFrameI420BufferNeedsCropping {
  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(380, 640);
  RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 360);
    XCTAssertEqual(outputFrame.height, 640);

    RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer;

    double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
    XCTAssertGreaterThanOrEqual(psnr, 40);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
}

@end
sdk/objc/Framework/UnitTests/RTCCVPixelBuffer_xctest.mm | 264 (new file)

/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import <Foundation/Foundation.h>
#import <XCTest/XCTest.h>

#import "Video/RTCI420Buffer+Private.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#import "sdk/objc/Framework/UnitTests/frame_buffer_helpers.h"
#include "third_party/libyuv/include/libyuv.h"

@interface RTCCVPixelBufferTests : XCTestCase
@end

@implementation RTCCVPixelBufferTests {
}

- (void)testRequiresCroppingNoCrop {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];

  XCTAssertFalse([buffer requiresCropping]);

  CVBufferRelease(pixelBufferRef);
}

- (void)testRequiresCroppingWithCrop {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *croppedBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
                                                                     adaptedWidth:720
                                                                    adaptedHeight:1280
                                                                        cropWidth:360
                                                                       cropHeight:640
                                                                            cropX:100
                                                                            cropY:100];

  XCTAssertTrue([croppedBuffer requiresCropping]);

  CVBufferRelease(pixelBufferRef);
}

- (void)testRequiresScalingNoScale {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  XCTAssertFalse([buffer requiresScalingToWidth:720 height:1280]);

  CVBufferRelease(pixelBufferRef);
}

- (void)testRequiresScalingWithScale {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  XCTAssertTrue([buffer requiresScalingToWidth:360 height:640]);

  CVBufferRelease(pixelBufferRef);
}

- (void)testRequiresScalingWithScaleAndMatchingCrop {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
                                                              adaptedWidth:720
                                                             adaptedHeight:1280
                                                                 cropWidth:360
                                                                cropHeight:640
                                                                     cropX:100
                                                                     cropY:100];
  XCTAssertFalse([buffer requiresScalingToWidth:360 height:640]);

  CVBufferRelease(pixelBufferRef);
}

- (void)testBufferSize_NV12 {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 576000);

  CVBufferRelease(pixelBufferRef);
}

- (void)testBufferSize_RGB {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 0);

  CVBufferRelease(pixelBufferRef);
}

- (void)testCropAndScale_NV12 {
  [self cropAndScaleTestWithNV12];
}

- (void)testCropAndScaleNoOp_NV12 {
  [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
                               outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
                                 outputSize:CGSizeMake(720, 1280)];
}

- (void)testCropAndScale_NV12FullToVideo {
  [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
                               outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
}

- (void)testCropAndScaleZeroSizeFrame_NV12 {
  [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
                               outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
                                 outputSize:CGSizeMake(0, 0)];
}

- (void)testCropAndScale_32BGRA {
  [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32BGRA];
}

- (void)testCropAndScale_32ARGB {
  [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB];
}

- (void)testToI420_NV12 {
  [self toI420WithPixelFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
}

- (void)testToI420_32BGRA {
  [self toI420WithPixelFormat:kCVPixelFormatType_32BGRA];
}

- (void)testToI420_32ARGB {
  [self toI420WithPixelFormat:kCVPixelFormatType_32ARGB];
}

#pragma mark - Shared test code

- (void)cropAndScaleTestWithNV12 {
  [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
                               outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
}

- (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat outputFormat:(OSType)outputFormat {
  [self cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat
                               outputFormat:(OSType)outputFormat
                                 outputSize:CGSizeMake(360, 640)];
}

- (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat
                               outputFormat:(OSType)outputFormat
                                 outputSize:(CGSize)outputSize {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(NULL, 720, 1280, inputFormat, NULL, &pixelBufferRef);

  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
  CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  XCTAssertEqual(buffer.width, 720);
  XCTAssertEqual(buffer.height, 1280);

  CVPixelBufferRef outputPixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, outputSize.width, outputSize.height, outputFormat, NULL, &outputPixelBufferRef);

  std::vector<uint8_t> frameScaleBuffer;
  if ([buffer requiresScalingToWidth:outputSize.width height:outputSize.height]) {
    int size =
        [buffer bufferSizeForCroppingAndScalingToWidth:outputSize.width height:outputSize.height];
    frameScaleBuffer.resize(size);
  } else {
    frameScaleBuffer.clear();
  }
  frameScaleBuffer.shrink_to_fit();

  [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:frameScaleBuffer.data()];

  RTCCVPixelBuffer *scaledBuffer =
      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
  XCTAssertEqual(scaledBuffer.width, outputSize.width);
  XCTAssertEqual(scaledBuffer.height, outputSize.height);

  if (outputSize.width > 0 && outputSize.height > 0) {
    RTCI420Buffer *originalBufferI420 = [buffer toI420];
    RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420];
    double psnr =
        I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
    XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
  }

  CVBufferRelease(pixelBufferRef);
}

- (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(NULL, 720, 1280, pixelFormat, NULL, &pixelBufferRef);

  DrawGradientInRGBPixelBuffer(pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  XCTAssertEqual(buffer.width, 720);
  XCTAssertEqual(buffer.height, 1280);

  CVPixelBufferRef outputPixelBufferRef = NULL;
  CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &outputPixelBufferRef);
  [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:NULL];

  RTCCVPixelBuffer *scaledBuffer =
      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
  XCTAssertEqual(scaledBuffer.width, 360);
  XCTAssertEqual(scaledBuffer.height, 640);

  RTCI420Buffer *originalBufferI420 = [buffer toI420];
  RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420];
  double psnr =
      I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
  XCTAssertEqual(psnr, webrtc::kPerfectPSNR);

  CVBufferRelease(pixelBufferRef);
}

- (void)toI420WithPixelFormat:(OSType)pixelFormat {
  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(360, 640);

  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &pixelBufferRef);

  CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  RTCI420Buffer *fromCVPixelBuffer = [buffer toI420];

  double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]);
  double target = webrtc::kPerfectPSNR;
  if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
    // libyuv's I420ToRGB functions seem to lose some quality.
    target = 19.0;
  }
  XCTAssertGreaterThanOrEqual(psnr, target);

  CVBufferRelease(pixelBufferRef);
}

@end
sdk/objc/Framework/UnitTests/frame_buffer_helpers.h | 22 (new file)

/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import <AVFoundation/AVFoundation.h>

#include "api/video/i420_buffer.h"

void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer);

rtc::scoped_refptr<webrtc::I420Buffer> CreateI420Gradient(int width,
                                                          int height);

void CopyI420BufferToCVPixelBuffer(
    rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer,
    CVPixelBufferRef pixelBuffer);
sdk/objc/Framework/UnitTests/frame_buffer_helpers.mm | 123 (new file)

/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "sdk/objc/Framework/UnitTests/frame_buffer_helpers.h"

#include "third_party/libyuv/include/libyuv.h"

void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer) {
  CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
  void* baseAddr = CVPixelBufferGetBaseAddress(pixelBuffer);
  size_t width = CVPixelBufferGetWidth(pixelBuffer);
  size_t height = CVPixelBufferGetHeight(pixelBuffer);
  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  CGContextRef cgContext = CGBitmapContextCreate(baseAddr,
                                                 width,
                                                 height,
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pixelBuffer),
                                                 colorSpace,
                                                 kCGImageAlphaNoneSkipLast);

  // Create a gradient
  CGFloat colors[] = {
      1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0,
  };
  CGGradientRef gradient = CGGradientCreateWithColorComponents(colorSpace, colors, NULL, 4);

  CGContextDrawLinearGradient(
      cgContext, gradient, CGPointMake(0, 0), CGPointMake(width, height), 0);
  CGGradientRelease(gradient);

  CGImageRef cgImage = CGBitmapContextCreateImage(cgContext);
  CGContextRelease(cgContext);
  CGImageRelease(cgImage);
  CGColorSpaceRelease(colorSpace);

  CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}

rtc::scoped_refptr<webrtc::I420Buffer> CreateI420Gradient(int width, int height) {
  rtc::scoped_refptr<webrtc::I420Buffer> buffer(webrtc::I420Buffer::Create(width, height));
  // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h
  for (int x = 0; x < width; x++) {
    for (int y = 0; y < height; y++) {
      buffer->MutableDataY()[x + y * width] = 128 * (x * height + y * width) / (width * height);
    }
  }
  int chroma_width = buffer->ChromaWidth();
  int chroma_height = buffer->ChromaHeight();
  for (int x = 0; x < chroma_width; x++) {
    for (int y = 0; y < chroma_height; y++) {
      buffer->MutableDataU()[x + y * chroma_width] = 255 * x / (chroma_width - 1);
      buffer->MutableDataV()[x + y * chroma_width] = 255 * y / (chroma_height - 1);
    }
  }
  return buffer;
}

void CopyI420BufferToCVPixelBuffer(rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer,
                                   CVPixelBufferRef pixelBuffer) {
  CVPixelBufferLockBaseAddress(pixelBuffer, 0);

  const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
  if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ||
      pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
    // NV12
    uint8_t* dstY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
    const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    uint8_t* dstUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
    const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

    libyuv::I420ToNV12(i420Buffer->DataY(),
                       i420Buffer->StrideY(),
                       i420Buffer->DataU(),
                       i420Buffer->StrideU(),
                       i420Buffer->DataV(),
                       i420Buffer->StrideV(),
                       dstY,
                       dstYStride,
                       dstUV,
                       dstUVStride,
                       i420Buffer->width(),
                       i420Buffer->height());
  } else {
    uint8_t* dst = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(pixelBuffer));
    const int bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

    if (pixelFormat == kCVPixelFormatType_32BGRA) {
      // Corresponds to libyuv::FOURCC_ARGB
      libyuv::I420ToARGB(i420Buffer->DataY(),
                         i420Buffer->StrideY(),
                         i420Buffer->DataU(),
                         i420Buffer->StrideU(),
                         i420Buffer->DataV(),
                         i420Buffer->StrideV(),
                         dst,
                         bytesPerRow,
                         i420Buffer->width(),
                         i420Buffer->height());
    } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
      // Corresponds to libyuv::FOURCC_BGRA
      libyuv::I420ToBGRA(i420Buffer->DataY(),
                         i420Buffer->StrideY(),
                         i420Buffer->DataU(),
                         i420Buffer->StrideU(),
                         i420Buffer->DataV(),
                         i420Buffer->StrideV(),
                         dst,
                         bytesPerRow,
                         i420Buffer->width(),
                         i420Buffer->height());
    }
  }

  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}