Add unit tests for RTCCVPixelBuffer and ObjCVideoTrackSource.

This CL also fixes a couple of bugs found in the toI420 method for
RTCCVPixelBuffers backed by RGB CVPixelBuffers.

Bug: webrtc:9007
Change-Id: I19ab8177f4b124a503cfda9f0166bd960f668982
Reviewed-on: https://webrtc-review.googlesource.com/64940
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22656}
This commit is contained in:
Anders Carlsson
2018-03-28 16:18:04 +02:00
committed by Commit Bot
parent 730560a65c
commit 4ea50c2b42
8 changed files with 815 additions and 33 deletions

View File

@ -0,0 +1,364 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <XCTest/XCTest.h>
#include "sdk/objc/Framework/Native/src/objc_video_track_source.h"
#import "Video/RTCI420Buffer+Private.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/fakevideorenderer.h"
#include "rtc_base/refcountedobject.h"
#include "rtc_base/scoped_ref_ptr.h"
#include "sdk/objc/Framework/Native/api/video_frame.h"
#import "sdk/objc/Framework/UnitTests/frame_buffer_helpers.h"
typedef void (^VideoSinkCallback)(RTCVideoFrame *);

namespace {

// Test sink that forwards every delivered native frame to an Obj-C block,
// converting it to an RTCVideoFrame first. Lets XCTest code inspect the
// output of an ObjCVideoTrackSource via expectations.
class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  explicit ObjCCallbackVideoSink(VideoSinkCallback callback) : callback_(callback) {}

  // Use `override` (not bare `virtual`) so the compiler verifies this
  // actually overrides VideoSinkInterface::OnFrame.
  void OnFrame(const webrtc::VideoFrame &frame) override {
    callback_(NativeToObjCVideoFrame(frame));
  }

 private:
  VideoSinkCallback callback_;
};

}  // namespace
@interface ObjCVideoTrackSourceTests : XCTestCase
@end

// Tests that ObjCVideoTrackSource correctly adapts and crops captured frames
// (both CVPixelBuffer-backed and I420-backed) to the requested output format.
@implementation ObjCVideoTrackSourceTests {
  rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> _video_source;
}

- (void)setUp {
  _video_source = new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>();
}

- (void)tearDown {
  _video_source = NULL;
}

// A 720x1280 capture should be adapted down to the requested 360x640.
- (void)testOnCapturedFrameAdaptsFrame {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  // Stack-allocated: the previous heap allocation with `new` was never freed.
  cricket::FakeVideoRenderer video_renderer;

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&video_renderer, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  XCTAssertEqual(video_renderer.num_rendered_frames(), 1);
  XCTAssertEqual(video_renderer.width(), 360);
  XCTAssertEqual(video_renderer.height(), 640);

  // Detach the sink before it goes out of scope so the source is not left
  // holding a dangling pointer.
  video_source_interface->RemoveSink(&video_renderer);
  CVBufferRelease(pixelBufferRef);
}

// A capture that already matches the requested format must pass through
// untouched (same dimensions, same underlying pixel buffer, no crop).
- (void)testOnCapturedFrameWithoutAdaptation {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(frame.width, outputFrame.width);
    XCTAssertEqual(frame.height, outputFrame.height);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(buffer.cropX, outputBuffer.cropX);
    XCTAssertEqual(buffer.cropY, outputBuffer.cropY);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

// Adapting a CVPixelBuffer-backed frame keeps the original pixel buffer and
// records the adaptation on the RTCCVPixelBuffer wrapper.
- (void)testOnCapturedFrameCVPixelBufferNeedsAdaptation {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 360);
    XCTAssertEqual(outputFrame.height, 640);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(outputBuffer.cropX, 0);
    XCTAssertEqual(outputBuffer.cropY, 0);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

// A 380x640 capture with a 360x640 target needs a 10px horizontal crop on
// each side to fix the aspect ratio.
- (void)testOnCapturedFrameCVPixelBufferNeedsCropping {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 360);
    XCTAssertEqual(outputFrame.height, 640);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(outputBuffer.cropX, 10);
    XCTAssertEqual(outputBuffer.cropY, 0);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

// A buffer that was already adapted (720x1280 -> 700x700) gets further
// adaptation; crop values are expressed relative to the original buffer.
- (void)testOnCapturedFramePreAdaptedCVPixelBufferNeedsAdaptation {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
                                                              adaptedWidth:700
                                                             adaptedHeight:700
                                                                 cropWidth:720
                                                                cropHeight:1280
                                                                     cropX:0
                                                                     cropY:0];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 297);
    XCTAssertEqual(outputFrame.height, 525);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(outputBuffer.cropX, 152);
    XCTAssertEqual(outputBuffer.cropY, 0);
    XCTAssertEqual(outputBuffer.cropWidth, 396);
    XCTAssertEqual(outputBuffer.cropHeight, 700);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

// A pre-cropped buffer (cropX=10) that needs further cropping should have the
// additional crop accumulated on top of the existing one (10 + 5 -> 14 after
// rounding to even).
- (void)testOnCapturedFramePreCroppedCVPixelBufferNeedsCropping {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
                                                              adaptedWidth:370
                                                             adaptedHeight:640
                                                                 cropWidth:370
                                                                cropHeight:640
                                                                     cropX:10
                                                                     cropY:0];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 360);
    XCTAssertEqual(outputFrame.height, 640);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(outputBuffer.cropX, 14);
    XCTAssertEqual(outputBuffer.cropY, 0);
    XCTAssertEqual(outputBuffer.cropWidth, 360);
    XCTAssertEqual(outputBuffer.cropHeight, 640);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

// A pre-cropped buffer smaller than the target (300 wide < 360) should not be
// upscaled; instead the height is cropped to preserve the aspect ratio.
- (void)testOnCapturedFrameSmallerPreCroppedCVPixelBufferNeedsCropping {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
                                                              adaptedWidth:300
                                                             adaptedHeight:640
                                                                 cropWidth:300
                                                                cropHeight:640
                                                                     cropX:40
                                                                     cropY:0];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 300);
    XCTAssertEqual(outputFrame.height, 533);

    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
    XCTAssertEqual(outputBuffer.cropX, 40);
    XCTAssertEqual(outputBuffer.cropY, 52);
    XCTAssertEqual(outputBuffer.cropWidth, 300);
    XCTAssertEqual(outputBuffer.cropHeight, 533);
    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
  CVBufferRelease(pixelBufferRef);
}

// Adapting an I420-backed frame must be lossless (perfect PSNR) since the
// scale is an exact 2:1.
- (void)testOnCapturedFrameI420BufferNeedsAdaptation {
  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
  RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 360);
    XCTAssertEqual(outputFrame.height, 640);

    RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer;
    double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
    XCTAssertEqual(psnr, webrtc::kPerfectPSNR);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
}

// Cropping an I420-backed frame discards columns, so only require a high
// (>= 40 dB) PSNR rather than a perfect one.
- (void)testOnCapturedFrameI420BufferNeedsCropping {
  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(380, 640);
  RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer];
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];

  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
    XCTAssertEqual(outputFrame.width, 360);
    XCTAssertEqual(outputFrame.height, 640);

    RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer;
    double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
    XCTAssertGreaterThanOrEqual(psnr, 40);

    [callbackExpectation fulfill];
  });

  const rtc::VideoSinkWants video_sink_wants;
  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source;
  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);

  _video_source->OnOutputFormatRequest(640, 360, 30);
  _video_source->OnCapturedFrame(frame);

  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
}

@end

View File

@ -0,0 +1,227 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <XCTest/XCTest.h>
#import "Video/RTCI420Buffer+Private.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#import "sdk/objc/Framework/UnitTests/frame_buffer_helpers.h"
#include "third_party/libyuv/include/libyuv.h"
@interface RTCCVPixelBufferTests : XCTestCase
@end

// Tests for RTCCVPixelBuffer's cropping/scaling queries, cropAndScaleTo:, and
// toI420 conversion across NV12 and RGB pixel formats.
@implementation RTCCVPixelBufferTests {
}

- (void)testRequiresCroppingNoCrop {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];

  XCTAssertFalse([buffer requiresCropping]);

  CVBufferRelease(pixelBufferRef);
}

- (void)testRequiresCroppingWithCrop {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *croppedBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
                                                                     adaptedWidth:720
                                                                    adaptedHeight:1280
                                                                        cropWidth:360
                                                                       cropHeight:640
                                                                            cropX:100
                                                                            cropY:100];

  XCTAssertTrue([croppedBuffer requiresCropping]);

  CVBufferRelease(pixelBufferRef);
}

- (void)testRequiresScalingNoScale {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];

  XCTAssertFalse([buffer requiresScalingToWidth:720 height:1280]);

  CVBufferRelease(pixelBufferRef);
}

- (void)testRequiresScalingWithScale {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];

  XCTAssertTrue([buffer requiresScalingToWidth:360 height:640]);

  CVBufferRelease(pixelBufferRef);
}

// No scaling is needed when the crop region already matches the target size.
- (void)testRequiresScalingWithScaleAndMatchingCrop {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
                                                              adaptedWidth:720
                                                             adaptedHeight:1280
                                                                 cropWidth:360
                                                                cropHeight:640
                                                                     cropX:100
                                                                     cropY:100];

  XCTAssertFalse([buffer requiresScalingToWidth:360 height:640]);

  CVBufferRelease(pixelBufferRef);
}

// NV12 needs a temp buffer: 360*640 luma + 360*640/2 chroma = 576000 bytes.
- (void)testBufferSize_NV12 {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];

  XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 576000);

  CVBufferRelease(pixelBufferRef);
}

// RGB crop/scale needs no intermediate buffer, so the required size is 0.
- (void)testBufferSize_RGB {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef);
  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];

  XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 0);

  CVBufferRelease(pixelBufferRef);
}

- (void)testCropAndScale_NV12 {
  [self cropAndScaleTestWithNV12];
}

- (void)testCropAndScale_32BGRA {
  [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32BGRA];
}

- (void)testCropAndScale_32ARGB {
  [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB];
}

- (void)testToI420_NV12 {
  [self toI420WithPixelFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
}

- (void)testToI420_32BGRA {
  [self toI420WithPixelFormat:kCVPixelFormatType_32BGRA];
}

- (void)testToI420_32ARGB {
  [self toI420WithPixelFormat:kCVPixelFormatType_32ARGB];
}

#pragma mark - Shared test code

// Scales a 720x1280 NV12 gradient down to 360x640 and verifies the result is
// a lossless (perfect-PSNR) 2:1 downscale.
- (void)cropAndScaleTestWithNV12 {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);

  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
  CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  XCTAssertEqual(buffer.width, 720);
  XCTAssertEqual(buffer.height, 1280);

  CVPixelBufferRef outputPixelBufferRef = NULL;
  CVPixelBufferCreate(
      NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &outputPixelBufferRef);

  std::vector<uint8_t> frameScaleBuffer;
  int size = [buffer bufferSizeForCroppingAndScalingToWidth:360 height:640];
  frameScaleBuffer.resize(size);
  [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:frameScaleBuffer.data()];

  RTCCVPixelBuffer *scaledBuffer =
      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
  XCTAssertEqual(scaledBuffer.width, 360);
  XCTAssertEqual(scaledBuffer.height, 640);

  RTCI420Buffer *originalBufferI420 = [buffer toI420];
  RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420];
  double psnr =
      I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
  XCTAssertEqual(psnr, webrtc::kPerfectPSNR);

  CVBufferRelease(pixelBufferRef);
  // Previously leaked: the output buffer also has a +1 retain from Create.
  CVBufferRelease(outputPixelBufferRef);
}

// Same as the NV12 variant, but for 32-bit RGB formats (no temp buffer needed).
- (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat {
  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(NULL, 720, 1280, pixelFormat, NULL, &pixelBufferRef);

  DrawGradientInRGBPixelBuffer(pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  XCTAssertEqual(buffer.width, 720);
  XCTAssertEqual(buffer.height, 1280);

  CVPixelBufferRef outputPixelBufferRef = NULL;
  CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &outputPixelBufferRef);
  [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:NULL];

  RTCCVPixelBuffer *scaledBuffer =
      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
  XCTAssertEqual(scaledBuffer.width, 360);
  XCTAssertEqual(scaledBuffer.height, 640);

  RTCI420Buffer *originalBufferI420 = [buffer toI420];
  RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420];
  double psnr =
      I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
  XCTAssertEqual(psnr, webrtc::kPerfectPSNR);

  CVBufferRelease(pixelBufferRef);
  // Previously leaked: the output buffer also has a +1 retain from Create.
  CVBufferRelease(outputPixelBufferRef);
}

// Round-trips a gradient through a CVPixelBuffer of the given format and back
// to I420, verifying the conversion quality.
- (void)toI420WithPixelFormat:(OSType)pixelFormat {
  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(360, 640);

  CVPixelBufferRef pixelBufferRef = NULL;
  CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &pixelBufferRef);

  CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);

  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
  RTCI420Buffer *fromCVPixelBuffer = [buffer toI420];

  double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]);
  double target = webrtc::kPerfectPSNR;
  if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
    // libyuv's I420ToRGB functions seem to lose some quality.
    target = 19.0;
  }
  XCTAssertGreaterThanOrEqual(psnr, target);

  CVBufferRelease(pixelBufferRef);
}

@end

View File

@ -0,0 +1,22 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <AVFoundation/AVFoundation.h>
#include "api/video/i420_buffer.h"
void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer);
rtc::scoped_refptr<webrtc::I420Buffer> CreateI420Gradient(int width,
int height);
void CopyI420BufferToCVPixelBuffer(
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer,
CVPixelBufferRef pixelBuffer);

View File

@ -0,0 +1,122 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "sdk/objc/Framework/UnitTests/frame_buffer_helpers.h"
#include "third_party/libyuv/include/libyuv.h"
// Fills a 32-bit RGB CVPixelBuffer with a diagonal four-color gradient
// (white -> red -> green -> blue) using CoreGraphics.
void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer) {
  // Lock for writing (flags = 0). The previous kCVPixelBufferLock_ReadOnly
  // lock made the writes through the base address invalid per the CoreVideo
  // contract, and the lock/unlock flags must match.
  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  void* baseAddr = CVPixelBufferGetBaseAddress(pixelBuffer);
  size_t width = CVPixelBufferGetWidth(pixelBuffer);
  size_t height = CVPixelBufferGetHeight(pixelBuffer);
  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  // Draw directly into the pixel buffer's memory.
  CGContextRef cgContext = CGBitmapContextCreate(baseAddr,
                                                 width,
                                                 height,
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pixelBuffer),
                                                 colorSpace,
                                                 kCGImageAlphaNoneSkipLast);

  // Create a gradient through white, red, green and blue, evenly spaced.
  CGFloat colors[] = {
      1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0,
  };
  CGGradientRef gradient = CGGradientCreateWithColorComponents(colorSpace, colors, NULL, 4);
  CGContextDrawLinearGradient(
      cgContext, gradient, CGPointMake(0, 0), CGPointMake(width, height), 0);
  CGGradientRelease(gradient);

  // Note: the previous version created a CGImage from the context and
  // immediately released it; that round-trip had no effect and was removed.
  CGContextRelease(cgContext);
  CGColorSpaceRelease(colorSpace);

  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
// Builds a width x height I420 buffer with a deterministic test pattern:
// Y ramps diagonally (Y = 128 * (x/w + y/h)), U ramps left-to-right and
// V ramps top-to-bottom across the chroma planes.
rtc::scoped_refptr<webrtc::I420Buffer> CreateI420Gradient(int width, int height) {
  rtc::scoped_refptr<webrtc::I420Buffer> buffer(webrtc::I420Buffer::Create(width, height));

  // Luma plane. I420Buffer::Create gives StrideY() == width, so row-major
  // indexing with `width` as the stride is valid here.
  uint8_t* y_plane = buffer->MutableDataY();
  for (int row = 0; row < height; ++row) {
    for (int col = 0; col < width; ++col) {
      y_plane[col + row * width] = 128 * (col * height + row * width) / (width * height);
    }
  }

  // Chroma planes: U is a horizontal ramp, V a vertical one.
  const int chroma_width = buffer->ChromaWidth();
  const int chroma_height = buffer->ChromaHeight();
  uint8_t* u_plane = buffer->MutableDataU();
  uint8_t* v_plane = buffer->MutableDataV();
  for (int row = 0; row < chroma_height; ++row) {
    for (int col = 0; col < chroma_width; ++col) {
      u_plane[col + row * chroma_width] = 255 * col / (chroma_width - 1);
      v_plane[col + row * chroma_width] = 255 * row / (chroma_height - 1);
    }
  }
  return buffer;
}
// Copies the contents of |i420Buffer| into |pixelBuffer|, converting to the
// pixel buffer's own format. Supports NV12
// (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), 32BGRA and 32ARGB;
// other formats are silently left untouched. The two buffers are assumed to
// have the same dimensions — TODO confirm callers guarantee this.
void CopyI420BufferToCVPixelBuffer(rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer,
                                   CVPixelBufferRef pixelBuffer) {
  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
  if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
    // NV12
    // Plane 0 is luma, plane 1 is interleaved CbCr; respect each plane's own
    // row stride, which may include padding.
    uint8_t* dstY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
    const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    uint8_t* dstUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
    const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    libyuv::I420ToNV12(i420Buffer->DataY(),
                       i420Buffer->StrideY(),
                       i420Buffer->DataU(),
                       i420Buffer->StrideU(),
                       i420Buffer->DataV(),
                       i420Buffer->StrideV(),
                       dstY,
                       dstYStride,
                       dstUV,
                       dstUVStride,
                       i420Buffer->width(),
                       i420Buffer->height());
  } else {
    // Packed RGB formats share a single plane.
    uint8_t* dst = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(pixelBuffer));
    const int bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

    if (pixelFormat == kCVPixelFormatType_32BGRA) {
      // Corresponds to libyuv::FOURCC_ARGB
      // (libyuv names formats by register byte order; its "ARGB" is B,G,R,A
      // in memory, which is CoreVideo's 32BGRA).
      libyuv::I420ToARGB(i420Buffer->DataY(),
                         i420Buffer->StrideY(),
                         i420Buffer->DataU(),
                         i420Buffer->StrideU(),
                         i420Buffer->DataV(),
                         i420Buffer->StrideV(),
                         dst,
                         bytesPerRow,
                         i420Buffer->width(),
                         i420Buffer->height());
    } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
      // Corresponds to libyuv::FOURCC_BGRA
      // (memory layout A,R,G,B — CoreVideo's 32ARGB).
      libyuv::I420ToBGRA(i420Buffer->DataY(),
                         i420Buffer->StrideY(),
                         i420Buffer->DataU(),
                         i420Buffer->StrideU(),
                         i420Buffer->DataV(),
                         i420Buffer->StrideV(),
                         dst,
                         bytesPerRow,
                         i420Buffer->width(),
                         i420Buffer->height());
    }
  }
  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}