Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.
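
As an illustrative sketch (not part of this change itself, which only moves the
files): once the content is hoisted, an include that today reads

    #include "webrtc/rtc_base/gunit.h"   // file lives under src/webrtc/rtc_base/

would be expected to become

    #include "rtc_base/gunit.h"          // file lives under src/rtc_base/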

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
Commit: bb547203bf (parent 6674846b4a)
Author: Mirko Bonadei
Date: 2017-09-15 06:15:48 +02:00
Committed by: Commit Bot
4576 changed files with 1092 additions and 1196 deletions


@@ -0,0 +1,338 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <OCMock/OCMock.h>
#include "webrtc/rtc_base/gunit.h"
#import "RTCAudioSession+Private.h"
#import "WebRTC/RTCAudioSession.h"
#import "WebRTC/RTCAudioSessionConfiguration.h"
@interface RTCAudioSessionTestDelegate : NSObject <RTCAudioSessionDelegate>
@property (nonatomic, readonly) float outputVolume;
@end
@implementation RTCAudioSessionTestDelegate
@synthesize outputVolume = _outputVolume;
- (instancetype)init {
if (self = [super init]) {
_outputVolume = -1;
}
return self;
}
- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session {
}
- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
shouldResumeSession:(BOOL)shouldResumeSession {
}
- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
}
- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session {
}
- (void)audioSessionMediaServerReset:(RTCAudioSession *)session {
}
- (void)audioSessionShouldConfigure:(RTCAudioSession *)session {
}
- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session {
}
- (void)audioSession:(RTCAudioSession *)audioSession
didChangeOutputVolume:(float)outputVolume {
_outputVolume = outputVolume;
}
@end
// A delegate that adds itself to the audio session on init and removes itself
// in its dealloc.
@interface RTCTestRemoveOnDeallocDelegate : RTCAudioSessionTestDelegate
@end
@implementation RTCTestRemoveOnDeallocDelegate
- (instancetype)init {
if (self = [super init]) {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
[session addDelegate:self];
}
return self;
}
- (void)dealloc {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
[session removeDelegate:self];
}
@end
@interface RTCAudioSessionTest : NSObject
- (void)testLockForConfiguration;
@end
@implementation RTCAudioSessionTest
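// Tests that lockForConfiguration calls nest: the session stays locked until
// every lock is balanced by an unlockForConfiguration call.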
- (void)testLockForConfiguration {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
for (size_t i = 0; i < 2; i++) {
[session lockForConfiguration];
EXPECT_TRUE(session.isLocked);
}
for (size_t i = 0; i < 2; i++) {
EXPECT_TRUE(session.isLocked);
[session unlockForConfiguration];
}
EXPECT_FALSE(session.isLocked);
}
- (void)testAddAndRemoveDelegates {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
NSMutableArray *delegates = [NSMutableArray array];
const size_t count = 5;
for (size_t i = 0; i < count; ++i) {
RTCAudioSessionTestDelegate *delegate =
[[RTCAudioSessionTestDelegate alloc] init];
[session addDelegate:delegate];
[delegates addObject:delegate];
EXPECT_EQ(i + 1, session.delegates.size());
}
[delegates enumerateObjectsUsingBlock:^(RTCAudioSessionTestDelegate *obj,
NSUInteger idx,
BOOL *stop) {
[session removeDelegate:obj];
}];
EXPECT_EQ(0u, session.delegates.size());
}
- (void)testPushDelegate {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
NSMutableArray *delegates = [NSMutableArray array];
const size_t count = 2;
for (size_t i = 0; i < count; ++i) {
RTCAudioSessionTestDelegate *delegate =
[[RTCAudioSessionTestDelegate alloc] init];
[session addDelegate:delegate];
[delegates addObject:delegate];
}
// Test that it gets added to the front of the list.
RTCAudioSessionTestDelegate *pushedDelegate =
[[RTCAudioSessionTestDelegate alloc] init];
[session pushDelegate:pushedDelegate];
EXPECT_TRUE(pushedDelegate == session.delegates[0]);
// Test that it stays at the front of the list.
for (size_t i = 0; i < count; ++i) {
RTCAudioSessionTestDelegate *delegate =
[[RTCAudioSessionTestDelegate alloc] init];
[session addDelegate:delegate];
[delegates addObject:delegate];
}
EXPECT_TRUE(pushedDelegate == session.delegates[0]);
// Test that the next one goes to the front too.
pushedDelegate = [[RTCAudioSessionTestDelegate alloc] init];
[session pushDelegate:pushedDelegate];
EXPECT_TRUE(pushedDelegate == session.delegates[0]);
}
// Tests that delegates added to the audio session properly zero out. This is
// checking an implementation detail (that vectors of __weak work as expected).
- (void)testZeroingWeakDelegate {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
@autoreleasepool {
// Add a delegate to the session. There should be one delegate at this
// point.
RTCAudioSessionTestDelegate *delegate =
[[RTCAudioSessionTestDelegate alloc] init];
[session addDelegate:delegate];
EXPECT_EQ(1u, session.delegates.size());
EXPECT_TRUE(session.delegates[0]);
}
// The previously created delegate should have been deallocated, leaving a nil ptr.
EXPECT_FALSE(session.delegates[0]);
RTCAudioSessionTestDelegate *delegate =
[[RTCAudioSessionTestDelegate alloc] init];
[session addDelegate:delegate];
// On adding a new delegate, nil ptrs should've been cleared.
EXPECT_EQ(1u, session.delegates.size());
EXPECT_TRUE(session.delegates[0]);
}
// Tests that we don't crash when removing delegates in dealloc.
// Added as a regression test.
- (void)testRemoveDelegateOnDealloc {
@autoreleasepool {
RTCTestRemoveOnDeallocDelegate *delegate =
[[RTCTestRemoveOnDeallocDelegate alloc] init];
EXPECT_TRUE(delegate);
}
RTCAudioSession *session = [RTCAudioSession sharedInstance];
EXPECT_EQ(0u, session.delegates.size());
}
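// Tests that audioSessionDidActivate:/audioSessionDidDeactivate: increment and
// decrement the activation count.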
- (void)testAudioSessionActivation {
RTCAudioSession *audioSession = [RTCAudioSession sharedInstance];
EXPECT_EQ(0, audioSession.activationCount);
[audioSession audioSessionDidActivate:[AVAudioSession sharedInstance]];
EXPECT_EQ(1, audioSession.activationCount);
[audioSession audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
EXPECT_EQ(0, audioSession.activationCount);
}
// Hack - fixes OCMVerify link error
// Link error is: Undefined symbols for architecture i386:
// "OCMMakeLocation(objc_object*, char const*, int)", referenced from:
// -[RTCAudioSessionTest testConfigureWebRTCSession] in RTCAudioSessionTest.o
// ld: symbol(s) not found for architecture i386
// REASON: https://github.com/erikdoe/ocmock/issues/238
OCMLocation *OCMMakeLocation(id testCase, const char *fileCString, int line){
return [OCMLocation locationWithTestCase:testCase
file:[NSString stringWithUTF8String:fileCString]
line:line];
}
- (void)testConfigureWebRTCSession {
NSError *error = nil;
void (^setActiveBlock)(NSInvocation *invocation) = ^(NSInvocation *invocation) {
__autoreleasing NSError **retError;
[invocation getArgument:&retError atIndex:4];
*retError = [NSError errorWithDomain:@"AVAudioSession"
code:AVAudioSessionErrorInsufficientPriority
userInfo:nil];
BOOL failure = NO;
[invocation setReturnValue:&failure];
};
id mockAVAudioSession = OCMPartialMock([AVAudioSession sharedInstance]);
OCMStub([[mockAVAudioSession ignoringNonObjectArgs]
setActive:YES withOptions:0 error:((NSError __autoreleasing **)[OCMArg anyPointer])]).
andDo(setActiveBlock);
id mockAudioSession = OCMPartialMock([RTCAudioSession sharedInstance]);
OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession);
RTCAudioSession *audioSession = mockAudioSession;
EXPECT_EQ(0, audioSession.activationCount);
[audioSession lockForConfiguration];
EXPECT_TRUE([audioSession checkLock:nil]);
// configureWebRTCSession is forced to fail in the above mock interface,
// so activationCount should remain 0
OCMExpect([[mockAVAudioSession ignoringNonObjectArgs]
setActive:YES withOptions:0 error:((NSError __autoreleasing **)[OCMArg anyPointer])]).
andDo(setActiveBlock);
OCMExpect([mockAudioSession session]).andReturn(mockAVAudioSession);
EXPECT_FALSE([audioSession configureWebRTCSession:&error]);
EXPECT_EQ(0, audioSession.activationCount);
id session = audioSession.session;
EXPECT_EQ(session, mockAVAudioSession);
EXPECT_EQ(NO, [mockAVAudioSession setActive:YES withOptions:0 error:&error]);
[audioSession unlockForConfiguration];
OCMVerify([mockAudioSession session]);
OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES withOptions:0 error:&error]);
OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:NO withOptions:0 error:&error]);
[mockAVAudioSession stopMocking];
[mockAudioSession stopMocking];
}
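// Tests that a KVO change of AVAudioSession's outputVolume is forwarded to
// delegates via audioSession:didChangeOutputVolume:.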
- (void)testAudioVolumeDidNotify {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTCAudioSessionTestDelegate *delegate =
[[RTCAudioSessionTestDelegate alloc] init];
[session addDelegate:delegate];
[session observeValueForKeyPath:@"outputVolume"
ofObject:[AVAudioSession sharedInstance]
change:
@{NSKeyValueChangeNewKey :
@([AVAudioSession sharedInstance].outputVolume) }
context:nil];
EXPECT_NE(delegate.outputVolume, -1);
EXPECT_EQ([AVAudioSession sharedInstance].outputVolume, delegate.outputVolume);
}
@end
namespace webrtc {
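// gtest fixture that removes any delegates left on the shared RTCAudioSession
// after each test.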
class AudioSessionTest : public ::testing::Test {
protected:
void TearDown() {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
for (id<RTCAudioSessionDelegate> delegate : session.delegates) {
[session removeDelegate:delegate];
}
}
};
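// The tests below bridge into gtest by instantiating the Objective-C test
// class and invoking one of its methods.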
TEST_F(AudioSessionTest, LockForConfiguration) {
RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
[test testLockForConfiguration];
}
TEST_F(AudioSessionTest, AddAndRemoveDelegates) {
RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
[test testAddAndRemoveDelegates];
}
TEST_F(AudioSessionTest, PushDelegate) {
RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
[test testPushDelegate];
}
TEST_F(AudioSessionTest, ZeroingWeakDelegate) {
RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
[test testZeroingWeakDelegate];
}
TEST_F(AudioSessionTest, RemoveDelegateOnDealloc) {
RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
[test testRemoveDelegateOnDealloc];
}
TEST_F(AudioSessionTest, AudioSessionActivation) {
RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
[test testAudioSessionActivation];
}
TEST_F(AudioSessionTest, ConfigureWebRTCSession) {
RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
[test testConfigureWebRTCSession];
}
TEST_F(AudioSessionTest, AudioVolumeDidNotify) {
RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
[test testAudioVolumeDidNotify];
}
} // namespace webrtc


@@ -0,0 +1,448 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <OCMock/OCMock.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif
#include "webrtc/rtc_base/gunit.h"
#import <WebRTC/RTCCameraVideoCapturer.h>
#import <WebRTC/RTCDispatcher.h>
#import <WebRTC/RTCVideoFrame.h>
#import "AVCaptureSession+DevicePosition.h"
#if TARGET_OS_IPHONE
// Helper method.
CMSampleBufferRef createTestSampleBufferRef() {
// This image is already in the testing bundle.
UIImage *image = [UIImage imageNamed:@"Default.png"];
CGSize size = image.size;
CGImageRef imageRef = [image CGImage];
CVPixelBufferRef pixelBuffer = nullptr;
CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, nil,
&pixelBuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
// We don't care about bitsPerComponent and bytesPerRow, so arbitrary values are used.
CGContextRef context = CGBitmapContextCreate(nil, size.width, size.height, 8, 8 * size.width,
rgbColorSpace, kCGImageAlphaPremultipliedFirst);
CGContextDrawImage(
context, CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)), imageRef);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
// We don't really care about the timing.
CMSampleTimingInfo timing = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
CMVideoFormatDescriptionRef description = nullptr;
CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &description);
CMSampleBufferRef sampleBuffer = nullptr;
CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, NULL, NULL, description,
&timing, &sampleBuffer);
CFRelease(pixelBuffer);
return sampleBuffer;
}
#endif
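// Expose RTCCameraVideoCapturer's conformance to
// AVCaptureVideoDataOutputSampleBufferDelegate so tests can feed sample
// buffers to it directly.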
@interface RTCCameraVideoCapturer (Tests)<AVCaptureVideoDataOutputSampleBufferDelegate>
@end
@interface RTCCameraVideoCapturerTests : NSObject
@property(nonatomic, strong) id delegateMock;
@property(nonatomic, strong) id deviceMock;
@property(nonatomic, strong) id captureConnectionMock;
@property(nonatomic, strong) RTCCameraVideoCapturer *capturer;
@end
@implementation RTCCameraVideoCapturerTests
@synthesize delegateMock = _delegateMock;
@synthesize captureConnectionMock = _captureConnectionMock;
@synthesize capturer = _capturer;
@synthesize deviceMock = _deviceMock;
- (void)setup {
self.delegateMock = OCMProtocolMock(@protocol(RTCVideoCapturerDelegate));
self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.delegateMock];
self.deviceMock = [self createDeviceMock];
}
- (void)tearDown {
[self.delegateMock stopMocking];
[self.deviceMock stopMocking];
self.delegateMock = nil;
self.deviceMock = nil;
self.capturer = nil;
}
#pragma mark - utils
- (id)createDeviceMock {
return OCMClassMock([AVCaptureDevice class]);
}
#pragma mark - test cases
- (void)testSetupSession {
AVCaptureSession *session = self.capturer.captureSession;
EXPECT_TRUE(session != nil);
#if TARGET_OS_IPHONE
EXPECT_EQ(session.sessionPreset, AVCaptureSessionPresetInputPriority);
EXPECT_EQ(session.usesApplicationAudioSession, NO);
#endif
EXPECT_EQ(session.outputs.count, 1u);
}
- (void)testSetupSessionOutput {
AVCaptureVideoDataOutput *videoOutput = self.capturer.captureSession.outputs[0];
EXPECT_EQ(videoOutput.alwaysDiscardsLateVideoFrames, NO);
EXPECT_EQ(videoOutput.sampleBufferDelegate, self.capturer);
}
- (void)testSupportedFormatsForDevice {
// given
id validFormat1 = OCMClassMock([AVCaptureDeviceFormat class]);
CMVideoFormatDescriptionRef format;
// We don't care about width and height, so arbitrary values of 123 and 456 are used.
int width = 123;
int height = 456;
CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8PlanarFullRange, width, height,
nil, &format);
OCMStub([validFormat1 formatDescription]).andReturn(format);
id validFormat2 = OCMClassMock([AVCaptureDeviceFormat class]);
CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, width,
height, nil, &format);
OCMStub([validFormat2 formatDescription]).andReturn(format);
id invalidFormat = OCMClassMock([AVCaptureDeviceFormat class]);
CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_422YpCbCr8_yuvs, width, height, nil,
&format);
OCMStub([invalidFormat formatDescription]).andReturn(format);
NSArray *formats = @[ validFormat1, validFormat2, invalidFormat ];
OCMStub([self.deviceMock formats]).andReturn(formats);
// when
NSArray *supportedFormats = [RTCCameraVideoCapturer supportedFormatsForDevice:self.deviceMock];
// then
EXPECT_EQ(supportedFormats.count, 2u);
EXPECT_TRUE([supportedFormats containsObject:validFormat1]);
EXPECT_TRUE([supportedFormats containsObject:validFormat2]);
// cleanup
[validFormat1 stopMocking];
[validFormat2 stopMocking];
[invalidFormat stopMocking];
validFormat1 = nil;
validFormat2 = nil;
invalidFormat = nil;
}
- (void)testCaptureDevices {
OCMStub([self.deviceMock devicesWithMediaType:AVMediaTypeVideo]).andReturn(@[ [NSObject new] ]);
OCMStub([self.deviceMock devicesWithMediaType:AVMediaTypeAudio]).andReturn(@[ [NSObject new] ]);
NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices];
EXPECT_EQ(captureDevices.count, 1u);
}
- (void)testDelegateCallbackNotCalledWhenInvalidBuffer {
// given
CMSampleBufferRef sampleBuffer = nullptr;
[[self.delegateMock reject] capturer:[OCMArg any] didCaptureVideoFrame:[OCMArg any]];
// when
[self.capturer captureOutput:self.capturer.captureSession.outputs[0]
didOutputSampleBuffer:sampleBuffer
fromConnection:self.captureConnectionMock];
// then
[self.delegateMock verify];
}
- (void)testDelegateCallbackWithValidBufferAndOrientationUpdate {
#if TARGET_OS_IPHONE
// given
UIDevice *currentDeviceMock = OCMClassMock([UIDevice class]);
// UpsideDown -> RTCVideoRotation_270.
OCMStub(currentDeviceMock.orientation).andReturn(UIDeviceOrientationPortraitUpsideDown);
id classMock = OCMClassMock([UIDevice class]);
OCMStub([classMock currentDevice]).andReturn(currentDeviceMock);
CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
// then
[[self.delegateMock expect] capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_270);
return YES;
}]];
// when
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
// We need to wait for the dispatch to finish.
WAIT(0, 1000);
[self.capturer captureOutput:self.capturer.captureSession.outputs[0]
didOutputSampleBuffer:sampleBuffer
fromConnection:self.captureConnectionMock];
[self.delegateMock verify];
[(id)currentDeviceMock stopMocking];
currentDeviceMock = nil;
[classMock stopMocking];
classMock = nil;
CFRelease(sampleBuffer);
#endif
}
- (void)testRotationCamera:(AVCaptureDevicePosition)camera
withOrientation:(UIDeviceOrientation)deviceOrientation {
#if TARGET_OS_IPHONE
// Mock the AVCaptureConnection as we will get the camera position from the connection's
// input ports.
AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
NSArray *inputPortsArrayMock = @[captureInputPort];
AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
andReturn(inputPortsArrayMock);
OCMStub(captureInputPort.input).andReturn(inputPortMock);
OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
OCMStub(captureDeviceMock.position).andReturn(camera);
// Mock UIDevice to report the requested device orientation.
UIDevice *currentDeviceMock = OCMClassMock([UIDevice class]);
OCMStub(currentDeviceMock.orientation).andReturn(deviceOrientation);
id classMock = OCMClassMock([UIDevice class]);
OCMStub([classMock currentDevice]).andReturn(currentDeviceMock);
CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
[[self.delegateMock expect] capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) {
if (camera == AVCaptureDevicePositionFront) {
if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
} else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
}
} else if (camera == AVCaptureDevicePositionBack) {
if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
} else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
}
}
return YES;
}]];
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
// We need to wait for the dispatch to finish.
WAIT(0, 1000);
[self.capturer captureOutput:self.capturer.captureSession.outputs[0]
didOutputSampleBuffer:sampleBuffer
fromConnection:self.captureConnectionMock];
[self.delegateMock verify];
[(id)currentDeviceMock stopMocking];
currentDeviceMock = nil;
[classMock stopMocking];
classMock = nil;
CFRelease(sampleBuffer);
#endif
}
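// Attaches an Exif dictionary whose LensModel entry identifies the back camera
// to the given sample buffer.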
- (void)setExif:(CMSampleBufferRef)sampleBuffer {
CFMutableDictionaryRef exif = CFDictionaryCreateMutable(kCFAllocatorDefault, 0, NULL, NULL);
CFDictionarySetValue(exif, CFSTR("LensModel"), CFSTR("iPhone SE back camera 4.15mm f/2.2"));
CMSetAttachment(sampleBuffer, CFSTR("{Exif}"), exif, kCMAttachmentMode_ShouldPropagate);
}
- (void)testRotationFrame {
#if TARGET_OS_IPHONE
// Mock the AVCaptureConnection as we will get the camera position from the connection's
// input ports.
AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
NSArray *inputPortsArrayMock = @[captureInputPort];
AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
andReturn(inputPortsArrayMock);
OCMStub(captureInputPort.input).andReturn(inputPortMock);
OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
OCMStub(captureDeviceMock.position).andReturn(AVCaptureDevicePositionFront);
// Mock UIDevice to report landscape-left orientation.
UIDevice *currentDeviceMock = OCMClassMock([UIDevice class]);
OCMStub(currentDeviceMock.orientation).andReturn(UIDeviceOrientationLandscapeLeft);
id classMock = OCMClassMock([UIDevice class]);
OCMStub([classMock currentDevice]).andReturn(currentDeviceMock);
CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
[[self.delegateMock expect] capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) {
// Front camera and landscape left should return 180. But the frame says it's from the back
// camera, so rotation should be 0.
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
return YES;
}]];
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
// We need to wait for the dispatch to finish.
WAIT(0, 1000);
[self setExif:sampleBuffer];
[self.capturer captureOutput:self.capturer.captureSession.outputs[0]
didOutputSampleBuffer:sampleBuffer
fromConnection:self.captureConnectionMock];
[self.delegateMock verify];
[(id)currentDeviceMock stopMocking];
currentDeviceMock = nil;
[classMock stopMocking];
classMock = nil;
CFRelease(sampleBuffer);
#endif
}
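// Tests that the device position can be recovered from the sample buffer's
// Exif LensModel attachment.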
- (void)testImageExif {
#if TARGET_OS_IPHONE
CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
[self setExif:sampleBuffer];
AVCaptureDevicePosition cameraPosition = [AVCaptureSession
devicePositionForSampleBuffer:sampleBuffer];
EXPECT_EQ(cameraPosition, AVCaptureDevicePositionBack);
#endif
}
@end
// TODO(kthelgason): Reenable these tests on simulator.
// See bugs.webrtc.org/7813
#if TARGET_IPHONE_SIMULATOR
#define MAYBE_TEST(f, name) TEST(f, DISABLED_##name)
#else
#define MAYBE_TEST TEST
#endif
MAYBE_TEST(RTCCameraVideoCapturerTests, SetupSession) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testSetupSession];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, SetupSessionOutput) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testSetupSessionOutput];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, SupportedFormatsForDevice) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testSupportedFormatsForDevice];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, CaptureDevices) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testCaptureDevices];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, DelegateCallbackNotCalledWhenInvalidBuffer) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testDelegateCallbackNotCalledWhenInvalidBuffer];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, DelegateCallbackWithValidBufferAndOrientationUpdate) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testDelegateCallbackWithValidBufferAndOrientationUpdate];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, RotationCameraBackLandscapeLeft) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testRotationCamera:AVCaptureDevicePositionBack
withOrientation:UIDeviceOrientationLandscapeLeft];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, RotationCameraFrontLandscapeLeft) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testRotationCamera:AVCaptureDevicePositionFront
withOrientation:UIDeviceOrientationLandscapeLeft];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, RotationCameraBackLandscapeRight) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testRotationCamera:AVCaptureDevicePositionBack
withOrientation:UIDeviceOrientationLandscapeRight];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, RotationCameraFrontLandscapeRight) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testRotationCamera:AVCaptureDevicePositionFront
withOrientation:UIDeviceOrientationLandscapeRight];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, RotationCameraFrame) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testRotationFrame];
[test tearDown];
}
MAYBE_TEST(RTCCameraVideoCapturerTests, ImageExif) {
RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
[test setup];
[test testImageExif];
[test tearDown];
}


@@ -0,0 +1,144 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#include <vector>
#include "webrtc/rtc_base/gunit.h"
#import "NSString+StdString.h"
#import "RTCConfiguration+Private.h"
#import "WebRTC/RTCConfiguration.h"
#import "WebRTC/RTCIceServer.h"
#import "WebRTC/RTCIntervalRange.h"
@interface RTCConfigurationTest : NSObject
- (void)testConversionToNativeConfiguration;
- (void)testNativeConversionToConfiguration;
@end
@implementation RTCConfigurationTest
- (void)testConversionToNativeConfiguration {
NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
RTCIntervalRange *range = [[RTCIntervalRange alloc] initWithMin:0 max:100];
RTCConfiguration *config = [[RTCConfiguration alloc] init];
config.iceServers = @[ server ];
config.iceTransportPolicy = RTCIceTransportPolicyRelay;
config.bundlePolicy = RTCBundlePolicyMaxBundle;
config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate;
config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled;
config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost;
const int maxPackets = 60;
const int timeout = 1;
const int interval = 2;
config.audioJitterBufferMaxPackets = maxPackets;
config.audioJitterBufferFastAccelerate = YES;
config.iceConnectionReceivingTimeout = timeout;
config.iceBackupCandidatePairPingInterval = interval;
config.continualGatheringPolicy =
RTCContinualGatheringPolicyGatherContinually;
config.shouldPruneTurnPorts = YES;
config.iceRegatherIntervalRange = range;
std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
nativeConfig([config createNativeConfiguration]);
EXPECT_TRUE(nativeConfig.get());
EXPECT_EQ(1u, nativeConfig->servers.size());
webrtc::PeerConnectionInterface::IceServer nativeServer =
nativeConfig->servers.front();
EXPECT_EQ(1u, nativeServer.urls.size());
EXPECT_EQ("stun:stun1.example.net", nativeServer.urls.front());
EXPECT_EQ(webrtc::PeerConnectionInterface::kRelay, nativeConfig->type);
EXPECT_EQ(webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle,
nativeConfig->bundle_policy);
EXPECT_EQ(webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate,
nativeConfig->rtcp_mux_policy);
EXPECT_EQ(webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled,
nativeConfig->tcp_candidate_policy);
EXPECT_EQ(webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost,
nativeConfig->candidate_network_policy);
EXPECT_EQ(maxPackets, nativeConfig->audio_jitter_buffer_max_packets);
EXPECT_EQ(true, nativeConfig->audio_jitter_buffer_fast_accelerate);
EXPECT_EQ(timeout, nativeConfig->ice_connection_receiving_timeout);
EXPECT_EQ(interval, nativeConfig->ice_backup_candidate_pair_ping_interval);
EXPECT_EQ(webrtc::PeerConnectionInterface::GATHER_CONTINUALLY,
nativeConfig->continual_gathering_policy);
EXPECT_EQ(true, nativeConfig->prune_turn_ports);
EXPECT_EQ(range.min, nativeConfig->ice_regather_interval_range->min());
EXPECT_EQ(range.max, nativeConfig->ice_regather_interval_range->max());
}
- (void)testNativeConversionToConfiguration {
NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
RTCIntervalRange *range = [[RTCIntervalRange alloc] initWithMin:0 max:100];
RTCConfiguration *config = [[RTCConfiguration alloc] init];
config.iceServers = @[ server ];
config.iceTransportPolicy = RTCIceTransportPolicyRelay;
config.bundlePolicy = RTCBundlePolicyMaxBundle;
config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate;
config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled;
config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost;
const int maxPackets = 60;
const int timeout = 1;
const int interval = 2;
config.audioJitterBufferMaxPackets = maxPackets;
config.audioJitterBufferFastAccelerate = YES;
config.iceConnectionReceivingTimeout = timeout;
config.iceBackupCandidatePairPingInterval = interval;
config.continualGatheringPolicy =
RTCContinualGatheringPolicyGatherContinually;
config.shouldPruneTurnPorts = YES;
config.iceRegatherIntervalRange = range;
webrtc::PeerConnectionInterface::RTCConfiguration *nativeConfig =
[config createNativeConfiguration];
RTCConfiguration *newConfig = [[RTCConfiguration alloc]
initWithNativeConfiguration:*nativeConfig];
EXPECT_EQ([config.iceServers count], newConfig.iceServers.count);
RTCIceServer *newServer = newConfig.iceServers[0];
RTCIceServer *origServer = config.iceServers[0];
EXPECT_EQ(origServer.urlStrings.count, server.urlStrings.count);
std::string origUrl = origServer.urlStrings.firstObject.UTF8String;
std::string url = newServer.urlStrings.firstObject.UTF8String;
EXPECT_EQ(origUrl, url);
EXPECT_EQ(config.iceTransportPolicy, newConfig.iceTransportPolicy);
EXPECT_EQ(config.bundlePolicy, newConfig.bundlePolicy);
EXPECT_EQ(config.rtcpMuxPolicy, newConfig.rtcpMuxPolicy);
EXPECT_EQ(config.tcpCandidatePolicy, newConfig.tcpCandidatePolicy);
EXPECT_EQ(config.candidateNetworkPolicy, newConfig.candidateNetworkPolicy);
EXPECT_EQ(config.audioJitterBufferMaxPackets, newConfig.audioJitterBufferMaxPackets);
EXPECT_EQ(config.audioJitterBufferFastAccelerate, newConfig.audioJitterBufferFastAccelerate);
EXPECT_EQ(config.iceConnectionReceivingTimeout, newConfig.iceConnectionReceivingTimeout);
EXPECT_EQ(config.iceBackupCandidatePairPingInterval,
newConfig.iceBackupCandidatePairPingInterval);
EXPECT_EQ(config.continualGatheringPolicy, newConfig.continualGatheringPolicy);
EXPECT_EQ(config.shouldPruneTurnPorts, newConfig.shouldPruneTurnPorts);
EXPECT_EQ(config.iceRegatherIntervalRange.min, newConfig.iceRegatherIntervalRange.min);
EXPECT_EQ(config.iceRegatherIntervalRange.max, newConfig.iceRegatherIntervalRange.max);
}
@end
TEST(RTCConfigurationTest, NativeConfigurationConversionTest) {
@autoreleasepool {
RTCConfigurationTest *test = [[RTCConfigurationTest alloc] init];
[test testConversionToNativeConfiguration];
[test testNativeConversionToConfiguration];
}
}


@@ -0,0 +1,59 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#include "webrtc/rtc_base/gunit.h"
#import "NSString+StdString.h"
#import "RTCDataChannelConfiguration+Private.h"
#import "WebRTC/RTCDataChannelConfiguration.h"
@interface RTCDataChannelConfigurationTest : NSObject
- (void)testConversionToNativeDataChannelInit;
@end
@implementation RTCDataChannelConfigurationTest
- (void)testConversionToNativeDataChannelInit {
BOOL isOrdered = NO;
int maxPacketLifeTime = 5;
int maxRetransmits = 4;
BOOL isNegotiated = YES;
int channelId = 4;
NSString *protocol = @"protocol";
RTCDataChannelConfiguration *dataChannelConfig =
[[RTCDataChannelConfiguration alloc] init];
dataChannelConfig.isOrdered = isOrdered;
dataChannelConfig.maxPacketLifeTime = maxPacketLifeTime;
dataChannelConfig.maxRetransmits = maxRetransmits;
dataChannelConfig.isNegotiated = isNegotiated;
dataChannelConfig.channelId = channelId;
dataChannelConfig.protocol = protocol;
webrtc::DataChannelInit nativeInit = dataChannelConfig.nativeDataChannelInit;
EXPECT_EQ(isOrdered, nativeInit.ordered);
EXPECT_EQ(maxPacketLifeTime, nativeInit.maxRetransmitTime);
EXPECT_EQ(maxRetransmits, nativeInit.maxRetransmits);
EXPECT_EQ(isNegotiated, nativeInit.negotiated);
EXPECT_EQ(channelId, nativeInit.id);
EXPECT_EQ(protocol.stdString, nativeInit.protocol);
}
@end
TEST(RTCDataChannelConfiguration, NativeDataChannelInitConversionTest) {
@autoreleasepool {
RTCDataChannelConfigurationTest *test =
[[RTCDataChannelConfigurationTest alloc] init];
[test testConversionToNativeDataChannelInit];
}
}


@@ -0,0 +1,76 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#include <memory>
#include "webrtc/rtc_base/gunit.h"
#import "NSString+StdString.h"
#import "RTCIceCandidate+Private.h"
#import "WebRTC/RTCIceCandidate.h"
@interface RTCIceCandidateTest : NSObject
- (void)testCandidate;
- (void)testInitFromNativeCandidate;
@end
@implementation RTCIceCandidateTest
- (void)testCandidate {
NSString *sdp = @"candidate:4025901590 1 udp 2122265343 "
"fdff:2642:12a6:fe38:c001:beda:fcf9:51aa "
"59052 typ host generation 0";
RTCIceCandidate *candidate = [[RTCIceCandidate alloc] initWithSdp:sdp
sdpMLineIndex:0
sdpMid:@"audio"];
std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate =
candidate.nativeCandidate;
EXPECT_EQ("audio", nativeCandidate->sdp_mid());
EXPECT_EQ(0, nativeCandidate->sdp_mline_index());
std::string sdpString;
nativeCandidate->ToString(&sdpString);
EXPECT_EQ(sdp.stdString, sdpString);
}
- (void)testInitFromNativeCandidate {
std::string sdp("candidate:4025901590 1 udp 2122265343 "
"fdff:2642:12a6:fe38:c001:beda:fcf9:51aa "
"59052 typ host generation 0");
webrtc::IceCandidateInterface *nativeCandidate =
webrtc::CreateIceCandidate("audio", 0, sdp, nullptr);
RTCIceCandidate *iceCandidate =
[[RTCIceCandidate alloc] initWithNativeCandidate:nativeCandidate];
EXPECT_TRUE([@"audio" isEqualToString:iceCandidate.sdpMid]);
EXPECT_EQ(0, iceCandidate.sdpMLineIndex);
EXPECT_EQ(sdp, iceCandidate.sdp.stdString);
}
@end
TEST(RTCIceCandidateTest, CandidateTest) {
@autoreleasepool {
RTCIceCandidateTest *test = [[RTCIceCandidateTest alloc] init];
[test testCandidate];
}
}
TEST(RTCIceCandidateTest, InitFromCandidateTest) {
@autoreleasepool {
RTCIceCandidateTest *test = [[RTCIceCandidateTest alloc] init];
[test testInitFromNativeCandidate];
}
}


@@ -0,0 +1,171 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#include <vector>
#include "webrtc/rtc_base/gunit.h"
#import "NSString+StdString.h"
#import "RTCIceServer+Private.h"
#import "WebRTC/RTCIceServer.h"
@interface RTCIceServerTest : NSObject
- (void)testOneURLServer;
- (void)testTwoURLServer;
- (void)testPasswordCredential;
- (void)testInitFromNativeServer;
@end
@implementation RTCIceServerTest
- (void)testOneURLServer {
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[
@"stun:stun1.example.net" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("stun:stun1.example.net", iceStruct.urls.front());
EXPECT_EQ("", iceStruct.username);
EXPECT_EQ("", iceStruct.password);
}
- (void)testTwoURLServer {
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[
@"turn1:turn1.example.net", @"turn2:turn2.example.net" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(2u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
EXPECT_EQ("turn2:turn2.example.net", iceStruct.urls.back());
EXPECT_EQ("", iceStruct.username);
EXPECT_EQ("", iceStruct.password);
}
- (void)testPasswordCredential {
RTCIceServer *server = [[RTCIceServer alloc]
initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
EXPECT_EQ("username", iceStruct.username);
EXPECT_EQ("credential", iceStruct.password);
}
- (void)testHostname {
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
EXPECT_EQ("username", iceStruct.username);
EXPECT_EQ("credential", iceStruct.password);
EXPECT_EQ("hostname", iceStruct.hostname);
}
- (void)testTlsAlpnProtocols {
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"
tlsAlpnProtocols:@[ @"proto1", @"proto2" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
EXPECT_EQ("username", iceStruct.username);
EXPECT_EQ("credential", iceStruct.password);
EXPECT_EQ("hostname", iceStruct.hostname);
EXPECT_EQ(2u, iceStruct.tls_alpn_protocols.size());
}
- (void)testTlsEllipticCurves {
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"
tlsAlpnProtocols:@[ @"proto1", @"proto2" ]
tlsEllipticCurves:@[ @"curve1", @"curve2" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
EXPECT_EQ("username", iceStruct.username);
EXPECT_EQ("credential", iceStruct.password);
EXPECT_EQ("hostname", iceStruct.hostname);
EXPECT_EQ(2u, iceStruct.tls_alpn_protocols.size());
EXPECT_EQ(2u, iceStruct.tls_elliptic_curves.size());
}
- (void)testInitFromNativeServer {
webrtc::PeerConnectionInterface::IceServer nativeServer;
nativeServer.username = "username";
nativeServer.password = "password";
nativeServer.urls.push_back("stun:stun.example.net");
nativeServer.hostname = "hostname";
nativeServer.tls_alpn_protocols.push_back("proto1");
nativeServer.tls_alpn_protocols.push_back("proto2");
nativeServer.tls_elliptic_curves.push_back("curve1");
nativeServer.tls_elliptic_curves.push_back("curve2");
RTCIceServer *iceServer =
[[RTCIceServer alloc] initWithNativeServer:nativeServer];
EXPECT_EQ(1u, iceServer.urlStrings.count);
EXPECT_EQ("stun:stun.example.net",
[NSString stdStringForString:iceServer.urlStrings.firstObject]);
EXPECT_EQ("username", [NSString stdStringForString:iceServer.username]);
EXPECT_EQ("password", [NSString stdStringForString:iceServer.credential]);
EXPECT_EQ("hostname", [NSString stdStringForString:iceServer.hostname]);
EXPECT_EQ(2u, iceServer.tlsAlpnProtocols.count);
EXPECT_EQ(2u, iceServer.tlsEllipticCurves.count);
}
@end
TEST(RTCIceServerTest, OneURLTest) {
@autoreleasepool {
RTCIceServerTest *test = [[RTCIceServerTest alloc] init];
[test testOneURLServer];
}
}
TEST(RTCIceServerTest, TwoURLTest) {
@autoreleasepool {
RTCIceServerTest *test = [[RTCIceServerTest alloc] init];
[test testTwoURLServer];
}
}
TEST(RTCIceServerTest, PasswordCredentialTest) {
@autoreleasepool {
RTCIceServerTest *test = [[RTCIceServerTest alloc] init];
[test testPasswordCredential];
}
}
TEST(RTCIceServerTest, HostnameTest) {
@autoreleasepool {
RTCIceServerTest *test = [[RTCIceServerTest alloc] init];
[test testHostname];
}
}
TEST(RTCIceServerTest, InitFromNativeServerTest) {
@autoreleasepool {
RTCIceServerTest *test = [[RTCIceServerTest alloc] init];
[test testInitFromNativeServer];
}
}


@@ -0,0 +1,54 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#include "webrtc/rtc_base/gunit.h"
#import "RTCIntervalRange+Private.h"
#import "WebRTC/RTCIntervalRange.h"
@interface RTCIntervalRangeTest : NSObject
- (void)testConversionToNativeConfiguration;
- (void)testNativeConversionToConfiguration;
@end
@implementation RTCIntervalRangeTest
- (void)testConversionToNativeConfiguration {
NSInteger min = 0;
NSInteger max = 100;
RTCIntervalRange *range = [[RTCIntervalRange alloc] initWithMin:min max:max];
EXPECT_EQ(min, range.min);
EXPECT_EQ(max, range.max);
std::unique_ptr<rtc::IntervalRange> nativeRange = range.nativeIntervalRange;
EXPECT_EQ(min, nativeRange->min());
EXPECT_EQ(max, nativeRange->max());
}
- (void)testNativeConversionToConfiguration {
NSInteger min = 0;
NSInteger max = 100;
rtc::IntervalRange nativeRange((int)min, (int)max);
RTCIntervalRange *range =
[[RTCIntervalRange alloc] initWithNativeIntervalRange:nativeRange];
EXPECT_EQ(min, range.min);
EXPECT_EQ(max, range.max);
}
@end
TEST(RTCIntervalRangeTest, NativeConfigurationConversionTest) {
@autoreleasepool {
RTCIntervalRangeTest *test = [[RTCIntervalRangeTest alloc] init];
[test testConversionToNativeConfiguration];
[test testNativeConversionToConfiguration];
}
}


@@ -0,0 +1,196 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <OCMock/OCMock.h>
#include "webrtc/rtc_base/gunit.h"
#include <Metal/RTCMTLNV12Renderer.h>
#include <WebRTC/RTCMTLVideoView.h>
#include <WebRTC/RTCVideoFrameBuffer.h>
// Extension of RTCMTLVideoView for testing purposes.
@interface RTCMTLVideoView (Testing)
+ (BOOL)isMetalAvailable;
+ (UIView*)createMetalView:(CGRect)frame;
+ (id<RTCMTLRenderer>)createNV12Renderer;
+ (id<RTCMTLRenderer>)createI420Renderer;
- (void)drawInMTKView:(id)view;
@end
@interface RTCMTLVideoViewTests : NSObject
@property(nonatomic, strong) id classMock;
@property(nonatomic, strong) id metalViewMock;
@property(nonatomic, strong) id rendererNV12Mock;
@property(nonatomic, strong) id rendererI420Mock;
@property(nonatomic, strong) id frameMock;
@end
@implementation RTCMTLVideoViewTests
@synthesize classMock = _classMock;
@synthesize metalViewMock = _metalViewMock;
@synthesize rendererNV12Mock = _rendererNV12Mock;
@synthesize rendererI420Mock = _rendererI420Mock;
@synthesize frameMock = _frameMock;
- (void)setup {
self.classMock = OCMClassMock([RTCMTLVideoView class]);
}
- (void)tearDown {
[self.classMock stopMocking];
[self.rendererI420Mock stopMocking];
[self.rendererNV12Mock stopMocking];
[self.metalViewMock stopMocking];
[self.frameMock stopMocking];
self.classMock = nil;
self.rendererI420Mock = nil;
self.rendererNV12Mock = nil;
self.metalViewMock = nil;
self.frameMock = nil;
}
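// Returns a mock RTCVideoFrame backed by either an RTCCVPixelBuffer or an
// RTCI420Buffer, depending on hasCVPixelBuffer.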
- (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer {
id frameMock = OCMClassMock([RTCVideoFrame class]);
if (hasCVPixelBuffer) {
CVPixelBufferRef pixelBufferRef;
CVPixelBufferCreate(kCFAllocatorDefault,
200,
200,
kCVPixelFormatType_420YpCbCr8Planar,
nullptr,
&pixelBufferRef);
OCMStub([frameMock buffer])
.andReturn([[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]);
} else {
OCMStub([frameMock buffer]).andReturn([[RTCI420Buffer alloc] initWithWidth:200 height:200]);
}
return frameMock;
}
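// Returns a mock RTCMTLRenderer whose addRenderingDestination: reports the
// given result.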
- (id)rendererMockWithSuccessfulSetup:(BOOL)success {
id rendererMock = OCMProtocolMock(@protocol(RTCMTLRenderer));
OCMStub([rendererMock addRenderingDestination:[OCMArg any]]).andReturn(success);
return rendererMock;
}
#pragma mark - Test cases
- (void)testInitAssertsIfMetalUnavailable {
// given
OCMStub([self.classMock isMetalAvailable]).andReturn(NO);
// when
BOOL asserts = NO;
@try {
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
(void)realView;
} @catch (NSException *ex) {
asserts = YES;
}
EXPECT_TRUE(asserts);
}
- (void)testRTCVideoRenderNilFrameCallback {
// given
OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init];
self.frameMock = OCMClassMock([RTCVideoFrame class]);
[[self.frameMock reject] buffer];
[[self.classMock reject] createNV12Renderer];
[[self.classMock reject] createI420Renderer];
// when
[realView renderFrame:nil];
[realView drawInMTKView:self.metalViewMock];
// then
[self.frameMock verify];
[self.classMock verify];
}
- (void)testRTCVideoRenderFrameCallbackI420 {
// given
OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
self.rendererI420Mock = [self rendererMockWithSuccessfulSetup:YES];
self.frameMock = [self frameMockWithCVPixelBuffer:NO];
OCMExpect([self.rendererI420Mock drawFrame:self.frameMock]);
OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock);
[[self.classMock reject] createNV12Renderer];
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init];
// when
[realView renderFrame:self.frameMock];
[realView drawInMTKView:self.metalViewMock];
// then
[self.rendererI420Mock verify];
[self.classMock verify];
}
- (void)testRTCVideoRenderFrameCallbackNV12 {
// given
OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
self.frameMock = [self frameMockWithCVPixelBuffer:YES];
OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
[[self.classMock reject] createI420Renderer];
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init];
// when
[realView renderFrame:self.frameMock];
[realView drawInMTKView:self.metalViewMock];
// then
[self.rendererNV12Mock verify];
[self.classMock verify];
}
@end
TEST(RTCMTLVideoViewTests, InitAssertsIfMetalUnavailable) {
RTCMTLVideoViewTests *test = [[RTCMTLVideoViewTests alloc] init];
[test setup];
[test testInitAssertsIfMetalUnavailable];
[test tearDown];
}
TEST(RTCMTLVideoViewTests, RTCVideoRenderNilFrameCallback) {
RTCMTLVideoViewTests *test = [[RTCMTLVideoViewTests alloc] init];
[test setup];
[test testRTCVideoRenderNilFrameCallback];
[test tearDown];
}
TEST(RTCMTLVideoViewTests, RTCVideoRenderFrameCallbackI420) {
RTCMTLVideoViewTests *test = [[RTCMTLVideoViewTests alloc] init];
[test setup];
[test testRTCVideoRenderFrameCallbackI420];
[test tearDown];
}
TEST(RTCMTLVideoViewTests, RTCVideoRenderFrameCallbackNV12) {
RTCMTLVideoViewTests *test = [[RTCMTLVideoViewTests alloc] init];
[test setup];
[test testRTCVideoRenderFrameCallbackNV12];
[test tearDown];
}


@@ -0,0 +1,68 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#include <memory>
#include "webrtc/rtc_base/gunit.h"
#import "NSString+StdString.h"
#import "RTCMediaConstraints+Private.h"
#import "WebRTC/RTCMediaConstraints.h"
@interface RTCMediaConstraintsTest : NSObject
- (void)testMediaConstraints;
@end
@implementation RTCMediaConstraintsTest
- (void)testMediaConstraints {
NSDictionary *mandatory = @{@"key1": @"value1", @"key2": @"value2"};
NSDictionary *optional = @{@"key3": @"value3", @"key4": @"value4"};
RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:mandatory
optionalConstraints:optional];
std::unique_ptr<webrtc::MediaConstraints> nativeConstraints =
[constraints nativeConstraints];
webrtc::MediaConstraintsInterface::Constraints nativeMandatory =
nativeConstraints->GetMandatory();
[self expectConstraints:mandatory inNativeConstraints:nativeMandatory];
webrtc::MediaConstraintsInterface::Constraints nativeOptional =
nativeConstraints->GetOptional();
[self expectConstraints:optional inNativeConstraints:nativeOptional];
}
- (void)expectConstraints:(NSDictionary *)constraints
inNativeConstraints:
(webrtc::MediaConstraintsInterface::Constraints)nativeConstraints {
EXPECT_EQ(constraints.count, nativeConstraints.size());
for (NSString *key in constraints) {
NSString *value = [constraints objectForKey:key];
std::string nativeValue;
bool found = nativeConstraints.FindFirst(key.stdString, &nativeValue);
EXPECT_TRUE(found);
EXPECT_EQ(value.stdString, nativeValue);
}
}
@end
TEST(RTCMediaConstraintsTest, MediaConstraintsTest) {
@autoreleasepool {
RTCMediaConstraintsTest *test = [[RTCMediaConstraintsTest alloc] init];
[test testMediaConstraints];
}
}


@@ -0,0 +1,101 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#include <vector>
#include "webrtc/rtc_base/gunit.h"
#import "NSString+StdString.h"
#import "RTCConfiguration+Private.h"
#import "WebRTC/RTCConfiguration.h"
#import "WebRTC/RTCPeerConnection.h"
#import "WebRTC/RTCPeerConnectionFactory.h"
#import "WebRTC/RTCIceServer.h"
#import "WebRTC/RTCMediaConstraints.h"
@interface RTCPeerConnectionTest : NSObject
- (void)testConfigurationGetter;
@end
@implementation RTCPeerConnectionTest
- (void)testConfigurationGetter {
NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
RTCConfiguration *config = [[RTCConfiguration alloc] init];
config.iceServers = @[ server ];
config.iceTransportPolicy = RTCIceTransportPolicyRelay;
config.bundlePolicy = RTCBundlePolicyMaxBundle;
config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate;
config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled;
config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost;
const int maxPackets = 60;
const int timeout = 1;
const int interval = 2;
config.audioJitterBufferMaxPackets = maxPackets;
config.audioJitterBufferFastAccelerate = YES;
config.iceConnectionReceivingTimeout = timeout;
config.iceBackupCandidatePairPingInterval = interval;
config.continualGatheringPolicy =
RTCContinualGatheringPolicyGatherContinually;
config.shouldPruneTurnPorts = YES;
RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
RTCConfiguration *newConfig;
@autoreleasepool {
RTCPeerConnection *peerConnection =
[factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
newConfig = peerConnection.configuration;
EXPECT_TRUE([peerConnection setBweMinBitrateBps:[NSNumber numberWithInt:100000]
currentBitrateBps:[NSNumber numberWithInt:5000000]
maxBitrateBps:[NSNumber numberWithInt:500000000]]);
EXPECT_FALSE([peerConnection setBweMinBitrateBps:[NSNumber numberWithInt:2]
currentBitrateBps:[NSNumber numberWithInt:1]
maxBitrateBps:nil]);
}
EXPECT_EQ([config.iceServers count], [newConfig.iceServers count]);
RTCIceServer *newServer = newConfig.iceServers[0];
RTCIceServer *origServer = config.iceServers[0];
std::string origUrl = origServer.urlStrings.firstObject.UTF8String;
std::string url = newServer.urlStrings.firstObject.UTF8String;
EXPECT_EQ(origUrl, url);
EXPECT_EQ(config.iceTransportPolicy, newConfig.iceTransportPolicy);
EXPECT_EQ(config.bundlePolicy, newConfig.bundlePolicy);
EXPECT_EQ(config.rtcpMuxPolicy, newConfig.rtcpMuxPolicy);
EXPECT_EQ(config.tcpCandidatePolicy, newConfig.tcpCandidatePolicy);
EXPECT_EQ(config.candidateNetworkPolicy, newConfig.candidateNetworkPolicy);
EXPECT_EQ(config.audioJitterBufferMaxPackets, newConfig.audioJitterBufferMaxPackets);
EXPECT_EQ(config.audioJitterBufferFastAccelerate, newConfig.audioJitterBufferFastAccelerate);
EXPECT_EQ(config.iceConnectionReceivingTimeout, newConfig.iceConnectionReceivingTimeout);
EXPECT_EQ(config.iceBackupCandidatePairPingInterval,
newConfig.iceBackupCandidatePairPingInterval);
EXPECT_EQ(config.continualGatheringPolicy, newConfig.continualGatheringPolicy);
EXPECT_EQ(config.shouldPruneTurnPorts, newConfig.shouldPruneTurnPorts);
}
@end
TEST(RTCPeerConnectionTest, ConfigurationGetterTest) {
@autoreleasepool {
RTCPeerConnectionTest *test = [[RTCPeerConnectionTest alloc] init];
[test testConfigurationGetter];
}
}


@@ -0,0 +1,144 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#include "webrtc/rtc_base/gunit.h"
#import "NSString+StdString.h"
#import "RTCSessionDescription+Private.h"
#import "WebRTC/RTCSessionDescription.h"
@interface RTCSessionDescriptionTest : NSObject
- (void)testSessionDescriptionConversion;
- (void)testInitFromNativeSessionDescription;
@end
@implementation RTCSessionDescriptionTest
/**
* Test conversion of an Objective-C RTCSessionDescription to a native
* SessionDescriptionInterface (based on the types and SDP strings being equal).
*/
- (void)testSessionDescriptionConversion {
RTCSessionDescription *description =
[[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer
sdp:[self sdp]];
webrtc::SessionDescriptionInterface *nativeDescription =
description.nativeDescription;
EXPECT_EQ(RTCSdpTypeAnswer,
[RTCSessionDescription typeForStdString:nativeDescription->type()]);
std::string sdp;
nativeDescription->ToString(&sdp);
EXPECT_EQ([self sdp].stdString, sdp);
}
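/**
 * Test creation of an RTCSessionDescription from a native
 * SessionDescriptionInterface (based on the types and SDP strings being equal).
 */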
- (void)testInitFromNativeSessionDescription {
webrtc::SessionDescriptionInterface *nativeDescription;
nativeDescription = webrtc::CreateSessionDescription(
webrtc::SessionDescriptionInterface::kAnswer,
[self sdp].stdString,
nullptr);
RTCSessionDescription *description =
[[RTCSessionDescription alloc] initWithNativeDescription:
nativeDescription];
EXPECT_EQ(webrtc::SessionDescriptionInterface::kAnswer,
[RTCSessionDescription stdStringForType:description.type]);
EXPECT_TRUE([[self sdp] isEqualToString:description.sdp]);
}
- (NSString *)sdp {
return @"v=0\r\n"
"o=- 5319989746393411314 2 IN IP4 127.0.0.1\r\n"
"s=-\r\n"
"t=0 0\r\n"
"a=group:BUNDLE audio video\r\n"
"a=msid-semantic: WMS ARDAMS\r\n"
"m=audio 9 UDP/TLS/RTP/SAVPF 111 103 9 0 8 126\r\n"
"c=IN IP4 0.0.0.0\r\n"
"a=rtcp:9 IN IP4 0.0.0.0\r\n"
"a=ice-ufrag:f3o+0HG7l9nwIWFY\r\n"
"a=ice-pwd:VDctmJNCptR2TB7+meDpw7w5\r\n"
"a=fingerprint:sha-256 A9:D5:8D:A8:69:22:39:60:92:AD:94:1A:22:2D:5E:"
"A5:4A:A9:18:C2:35:5D:46:5E:59:BD:1C:AF:38:9F:E6:E1\r\n"
"a=setup:active\r\n"
"a=mid:audio\r\n"
"a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\n"
"a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/"
"abs-send-time\r\n"
"a=sendrecv\r\n"
"a=rtcp-mux\r\n"
"a=rtpmap:111 opus/48000/2\r\n"
"a=fmtp:111 minptime=10;useinbandfec=1\r\n"
"a=rtpmap:103 ISAC/16000\r\n"
"a=rtpmap:9 G722/8000\r\n"
"a=rtpmap:0 PCMU/8000\r\n"
"a=rtpmap:8 PCMA/8000\r\n"
"a=rtpmap:126 telephone-event/8000\r\n"
"a=maxptime:60\r\n"
"a=ssrc:1504474588 cname:V+FdIC5AJpxLhdYQ\r\n"
"a=ssrc:1504474588 msid:ARDAMS ARDAMSa0\r\n"
"a=ssrc:1504474588 mslabel:ARDAMS\r\n"
"a=ssrc:1504474588 label:ARDAMSa0\r\n"
"m=video 9 UDP/TLS/RTP/SAVPF 100 116 117 96\r\n"
"c=IN IP4 0.0.0.0\r\n"
"a=rtcp:9 IN IP4 0.0.0.0\r\n"
"a=ice-ufrag:f3o+0HG7l9nwIWFY\r\n"
"a=ice-pwd:VDctmJNCptR2TB7+meDpw7w5\r\n"
"a=fingerprint:sha-256 A9:D5:8D:A8:69:22:39:60:92:AD:94:1A:22:2D:5E:"
"A5:4A:A9:18:C2:35:5D:46:5E:59:BD:1C:AF:38:9F:E6:E1\r\n"
"a=setup:active\r\n"
"a=mid:video\r\n"
"a=extmap:2 urn:ietf:params:rtp-hdrext:toffset\r\n"
"a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/"
"abs-send-time\r\n"
"a=extmap:4 urn:3gpp:video-orientation\r\n"
"a=sendrecv\r\n"
"a=rtcp-mux\r\n"
"a=rtpmap:100 VP8/90000\r\n"
"a=rtcp-fb:100 ccm fir\r\n"
"a=rtcp-fb:100 nack\r\n"
"a=rtcp-fb:100 nack pli\r\n"
"a=rtcp-fb:100 goog-remb\r\n"
"a=rtpmap:116 red/90000\r\n"
"a=rtpmap:117 ulpfec/90000\r\n"
"a=rtpmap:96 rtx/90000\r\n"
"a=fmtp:96 apt=100\r\n"
"a=ssrc-group:FID 498297514 1644357692\r\n"
"a=ssrc:498297514 cname:V+FdIC5AJpxLhdYQ\r\n"
"a=ssrc:498297514 msid:ARDAMS ARDAMSv0\r\n"
"a=ssrc:498297514 mslabel:ARDAMS\r\n"
"a=ssrc:498297514 label:ARDAMSv0\r\n"
"a=ssrc:1644357692 cname:V+FdIC5AJpxLhdYQ\r\n"
"a=ssrc:1644357692 msid:ARDAMS ARDAMSv0\r\n"
"a=ssrc:1644357692 mslabel:ARDAMS\r\n"
"a=ssrc:1644357692 label:ARDAMSv0\r\n";
}
@end
TEST(RTCSessionDescriptionTest, SessionDescriptionConversionTest) {
@autoreleasepool {
RTCSessionDescriptionTest *test = [[RTCSessionDescriptionTest alloc] init];
[test testSessionDescriptionConversion];
}
}
TEST(RTCSessionDescriptionTest, InitFromSessionDescriptionTest) {
@autoreleasepool {
RTCSessionDescriptionTest *test = [[RTCSessionDescriptionTest alloc] init];
[test testInitFromNativeSessionDescription];
}
}


@ -0,0 +1,50 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#include <vector>
#include "webrtc/rtc_base/gunit.h"
#import "NSString+StdString.h"
#import "WebRTC/RTCTracing.h"
@interface RTCTracingTest : NSObject
- (void)tracingTestNoInitialization;
@end
@implementation RTCTracingTest
- (NSString *)documentsFilePathForFileName:(NSString *)fileName {
NSParameterAssert(fileName.length);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirPath = paths.firstObject;
NSString *filePath =
[documentsDirPath stringByAppendingPathComponent:fileName];
return filePath;
}
- (void)tracingTestNoInitialization {
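// Tracing has not been initialized, so starting an internal capture is expected to fail.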
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-trace.txt"];
EXPECT_EQ(NO, RTCStartInternalCapture(filePath));
RTCStopInternalCapture();
}
@end
TEST(RTCTracingTest, TracingTestNoInitialization) {
@autoreleasepool {
RTCTracingTest *test = [[RTCTracingTest alloc] init];
[test tracingTestNoInitialization];
}
}


@ -0,0 +1,246 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <OCMock/OCMock.h>
#include "webrtc/rtc_base/gunit.h"
#include "Video/avfoundationformatmapper.h"
// Width and height don't play any role, so let's use predefined values throughout
// the tests.
static const int kFormatWidth = 789;
static const int kFormatHeight = 987;
// Hardcoded framerate to be used throughout the tests.
static const int kFramerate = 30;
// The same width and height are used, so it's OK to expect the same cricket::VideoFormat.
static cricket::VideoFormat expectedFormat =
cricket::VideoFormat(kFormatWidth,
kFormatHeight,
cricket::VideoFormat::FpsToInterval(kFramerate),
cricket::FOURCC_NV12);
// Mock class for AVCaptureDeviceFormat.
// A custom implementation is needed because OCMock cannot handle mocking of the
// CMVideoFormatDescriptionRef property.
@interface AVCaptureDeviceFormatMock : NSObject
@property (nonatomic, assign) CMVideoFormatDescriptionRef format;
@property (nonatomic, strong) OCMockObject *rangeMock;
- (instancetype)initWithMediaSubtype:(FourCharCode)subtype
minFps:(float)minFps
maxFps:(float)maxFps;
+ (instancetype)validFormat;
+ (instancetype)invalidFpsFormat;
+ (instancetype)invalidMediaSubtypeFormat;
@end
@implementation AVCaptureDeviceFormatMock
@synthesize format = _format;
@synthesize rangeMock = _rangeMock;
- (instancetype)initWithMediaSubtype:(FourCharCode)subtype
minFps:(float)minFps
maxFps:(float)maxFps {
if (self = [super init]) {
CMVideoFormatDescriptionCreate(nil, subtype, kFormatWidth, kFormatHeight,
nil, &_format);
// We can use OCMock for the range.
_rangeMock = [OCMockObject mockForClass:[AVFrameRateRange class]];
[[[_rangeMock stub] andReturnValue:@(minFps)] minFrameRate];
[[[_rangeMock stub] andReturnValue:@(maxFps)] maxFrameRate];
}
return self;
}
+ (instancetype)validFormat {
AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
minFps:0.0
maxFps:30.0];
return instance;
}
+ (instancetype)invalidFpsFormat {
AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
minFps:0.0
maxFps:22.0];
return instance;
}
+ (instancetype)invalidMediaSubtypeFormat {
AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8Planar
minFps:0.0
maxFps:60.0];
return instance;
}
- (void)dealloc {
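// _format was created with CMVideoFormatDescriptionCreate, so it must be released manually.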
if (_format != nil) {
CFRelease(_format);
_format = nil;
}
}
// Redefinition of the AVCaptureDeviceFormat methods we want to mock.
- (CMVideoFormatDescriptionRef)formatDescription {
return self.format;
}
- (NSArray *)videoSupportedFrameRateRanges {
return @[ self.rangeMock ];
}
@end
TEST(AVFormatMapperTest, SupportedCricketFormatsWithInvalidFramerateFormats) {
// given
id mockDevice = OCMClassMock([AVCaptureDevice class]);
// Valid media subtype, invalid framerate
AVCaptureDeviceFormatMock* mock =
[AVCaptureDeviceFormatMock invalidFpsFormat];
OCMStub([mockDevice formats]).andReturn(@[ mock ]);
// when
std::set<cricket::VideoFormat> result =
webrtc::GetSupportedVideoFormatsForDevice(mockDevice);
// then
EXPECT_TRUE(result.empty());
}
TEST(AVFormatMapperTest, SupportedCricketFormatsWithInvalidFormats) {
// given
id mockDevice = OCMClassMock([AVCaptureDevice class]);
// Invalid media subtype, valid framerate
AVCaptureDeviceFormatMock* mock =
[AVCaptureDeviceFormatMock invalidMediaSubtypeFormat];
OCMStub([mockDevice formats]).andReturn(@[ mock ]);
// when
std::set<cricket::VideoFormat> result =
webrtc::GetSupportedVideoFormatsForDevice(mockDevice);
// then
EXPECT_TRUE(result.empty());
}
TEST(AVFormatMapperTest, SupportedCricketFormats) {
// given
id mockDevice = OCMClassMock([AVCaptureDevice class]);
// valid media subtype, valid framerate
AVCaptureDeviceFormatMock* mock = [AVCaptureDeviceFormatMock validFormat];
OCMStub([mockDevice formats]).andReturn(@[ mock ]);
// when
std::set<cricket::VideoFormat> result =
webrtc::GetSupportedVideoFormatsForDevice(mockDevice);
// then
EXPECT_EQ(1u, result.size());
// make sure the set has the expected format
EXPECT_EQ(expectedFormat, *result.begin());
}
TEST(AVFormatMapperTest, MediaSubtypePreference) {
// given
id mockDevice = OCMClassMock([AVCaptureDevice class]);
// valid media subtype, valid framerate
AVCaptureDeviceFormatMock* mockOne = [[AVCaptureDeviceFormatMock alloc]
initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
minFps:0.0
maxFps:30.0];
// valid media subtype, valid framerate.
// This media subtype should be the preferred one.
AVCaptureDeviceFormatMock* mockTwo = [[AVCaptureDeviceFormatMock alloc]
initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
minFps:0.0
maxFps:30.0];
OCMStub([mockDevice lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
OCMStub([mockDevice unlockForConfiguration]);
NSArray* array = @[ mockOne, mockTwo ];
OCMStub([mockDevice formats]).andReturn(array);
// to verify
OCMExpect([mockDevice setActiveFormat:(AVCaptureDeviceFormat*)mockTwo]);
OCMExpect(
[mockDevice setActiveVideoMinFrameDuration:CMTimeMake(1, kFramerate)]);
// when
bool resultFormat =
webrtc::SetFormatForCaptureDevice(mockDevice, nil, expectedFormat);
// then
EXPECT_TRUE(resultFormat);
[mockDevice verify];
}
TEST(AVFormatMapperTest, SetFormatWhenDeviceCannotLock) {
// given
id mockDevice = OCMClassMock([AVCaptureDevice class]);
[[[mockDevice stub] andReturnValue:@(NO)]
lockForConfiguration:[OCMArg setTo:nil]];
[[[mockDevice stub] andReturn:@[]] formats];
// when
bool resultFormat = webrtc::SetFormatForCaptureDevice(mockDevice, nil,
cricket::VideoFormat());
// then
EXPECT_FALSE(resultFormat);
}
TEST(AVFormatMapperTest, SetFormatWhenFormatIsIncompatible) {
// given
id mockDevice = OCMClassMock([AVCaptureDevice class]);
OCMStub([mockDevice formats]).andReturn(@[]);
OCMStub([mockDevice lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
NSException* testException =
[NSException exceptionWithName:@"Test exception"
reason:@"Raised from unit tests"
userInfo:nil];
OCMStub([mockDevice setActiveFormat:[OCMArg any]]).andThrow(testException);
OCMExpect([mockDevice unlockForConfiguration]);
// when
bool resultFormat = webrtc::SetFormatForCaptureDevice(mockDevice, nil,
cricket::VideoFormat());
// then
EXPECT_FALSE(resultFormat);
// TODO(denicija): Remove try-catch when Chromium rolls this change:
// https://github.com/erikdoe/ocmock/commit/de1419415581dc307045e54bfe9c98c86efea96b
// Without it, stubbed exceptions are being re-raised on [mock verify].
// More information here:
// https://github.com/erikdoe/ocmock/issues/241
@try {
[mockDevice verify];
} @catch (NSException* exception) {
if ([exception.reason isEqual:testException.reason]) {
// Nothing dangerous here; the raised exception is the one we stubbed.
EXPECT_TRUE([exception.reason isEqualToString:testException.reason]);
}
}
}


@ -0,0 +1,105 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <OCMock/OCMock.h>
#include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/objc_video_decoder_factory.h"
#import "WebRTC/RTCVideoCodec.h"
#import "WebRTC/RTCVideoCodecFactory.h"
#include "webrtc/media/base/codec.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/include/video_error_codes.h"
#include "webrtc/rtc_base/gunit.h"
id<RTCVideoDecoderFactory> CreateDecoderFactoryReturning(int return_code) {
id decoderMock = OCMProtocolMock(@protocol(RTCVideoDecoder));
OCMStub([decoderMock startDecodeWithSettings:[OCMArg any] numberOfCores:1])
.andReturn(return_code);
OCMStub([decoderMock decode:[OCMArg any]
missingFrames:NO
fragmentationHeader:[OCMArg any]
codecSpecificInfo:[OCMArg any]
renderTimeMs:0])
.andReturn(return_code);
OCMStub([decoderMock releaseDecoder]).andReturn(return_code);
id decoderFactoryMock = OCMProtocolMock(@protocol(RTCVideoDecoderFactory));
RTCVideoCodecInfo *supported = [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil];
OCMStub([decoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
OCMStub([decoderFactoryMock createDecoder:[OCMArg any]]).andReturn(decoderMock);
return decoderFactoryMock;
}
id<RTCVideoDecoderFactory> CreateOKDecoderFactory() {
return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK);
}
id<RTCVideoDecoderFactory> CreateErrorDecoderFactory() {
return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
}
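// Wraps the Objective-C factory in an ObjCVideoDecoderFactory and creates an H264 decoder through it.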
webrtc::VideoDecoder *GetObjCDecoder(id<RTCVideoDecoderFactory> factory) {
webrtc::ObjCVideoDecoderFactory decoder_factory(factory);
return decoder_factory.CreateVideoDecoderWithParams(cricket::VideoCodec(cricket::kH264CodecName),
{});
}
#pragma mark -
TEST(ObjCVideoDecoderFactoryTest, InitDecodeReturnsOKOnSuccess) {
webrtc::VideoDecoder *decoder = GetObjCDecoder(CreateOKDecoderFactory());
auto settings = new webrtc::VideoCodec();
EXPECT_EQ(decoder->InitDecode(settings, 1), WEBRTC_VIDEO_CODEC_OK);
}
TEST(ObjCVideoDecoderFactoryTest, InitDecodeReturnsErrorOnFail) {
webrtc::VideoDecoder *decoder = GetObjCDecoder(CreateErrorDecoderFactory());
auto settings = new webrtc::VideoCodec();
EXPECT_EQ(decoder->InitDecode(settings, 1), WEBRTC_VIDEO_CODEC_ERROR);
}
TEST(ObjCVideoDecoderFactoryTest, DecodeReturnsOKOnSuccess) {
webrtc::VideoDecoder *decoder = GetObjCDecoder(CreateOKDecoderFactory());
webrtc::EncodedImage encoded_image;
webrtc::RTPFragmentationHeader header;
webrtc::CodecSpecificInfo info;
info.codecType = webrtc::kVideoCodecH264;
EXPECT_EQ(decoder->Decode(encoded_image, false, &header, &info, 0), WEBRTC_VIDEO_CODEC_OK);
}
TEST(ObjCVideoDecoderFactoryTest, DecodeReturnsErrorOnFail) {
webrtc::VideoDecoder *decoder = GetObjCDecoder(CreateErrorDecoderFactory());
webrtc::EncodedImage encoded_image;
webrtc::RTPFragmentationHeader header;
webrtc::CodecSpecificInfo info;
info.codecType = webrtc::kVideoCodecH264;
EXPECT_EQ(decoder->Decode(encoded_image, false, &header, &info, 0), WEBRTC_VIDEO_CODEC_ERROR);
}
TEST(ObjCVideoDecoderFactoryTest, ReleaseDecodeReturnsOKOnSuccess) {
webrtc::VideoDecoder *decoder = GetObjCDecoder(CreateOKDecoderFactory());
EXPECT_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_OK);
}
TEST(ObjCVideoDecoderFactoryTest, ReleaseDecodeReturnsErrorOnFail) {
webrtc::VideoDecoder *decoder = GetObjCDecoder(CreateErrorDecoderFactory());
EXPECT_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_ERROR);
}


@ -0,0 +1,133 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <OCMock/OCMock.h>
#include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/objc_video_encoder_factory.h"
#import "WebRTC/RTCVideoCodec.h"
#import "WebRTC/RTCVideoCodecFactory.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/include/video_error_codes.h"
#include "webrtc/rtc_base/gunit.h"
#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
id<RTCVideoEncoderFactory> CreateEncoderFactoryReturning(int return_code) {
id encoderMock = OCMProtocolMock(@protocol(RTCVideoEncoder));
OCMStub([encoderMock startEncodeWithSettings:[OCMArg any] numberOfCores:1])
.andReturn(return_code);
OCMStub([encoderMock encode:[OCMArg any] codecSpecificInfo:[OCMArg any] frameTypes:[OCMArg any]])
.andReturn(return_code);
OCMStub([encoderMock releaseEncoder]).andReturn(return_code);
OCMStub([encoderMock setBitrate:0 framerate:0]).andReturn(return_code);
id encoderFactoryMock = OCMProtocolMock(@protocol(RTCVideoEncoderFactory));
RTCVideoCodecInfo *supported = [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil];
OCMStub([encoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
OCMStub([encoderFactoryMock createEncoder:[OCMArg any]]).andReturn(encoderMock);
return encoderFactoryMock;
}
id<RTCVideoEncoderFactory> CreateOKEncoderFactory() {
return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK);
}
id<RTCVideoEncoderFactory> CreateErrorEncoderFactory() {
return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
}
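// Wraps the Objective-C factory in an ObjCVideoEncoderFactory and creates an H264 encoder through it.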
webrtc::VideoEncoder *GetObjCEncoder(id<RTCVideoEncoderFactory> factory) {
webrtc::ObjCVideoEncoderFactory encoder_factory(factory);
cricket::VideoCodec codec("H264");
return encoder_factory.CreateVideoEncoder(codec);
}
#pragma mark -
TEST(ObjCVideoEncoderFactoryTest, InitEncodeReturnsOKOnSuccess) {
webrtc::VideoEncoder *encoder = GetObjCEncoder(CreateOKEncoderFactory());
auto settings = new webrtc::VideoCodec();
EXPECT_EQ(encoder->InitEncode(settings, 1, 0), WEBRTC_VIDEO_CODEC_OK);
}
TEST(ObjCVideoEncoderFactoryTest, InitEncodeReturnsErrorOnFail) {
webrtc::VideoEncoder *encoder = GetObjCEncoder(CreateErrorEncoderFactory());
auto settings = new webrtc::VideoCodec();
EXPECT_EQ(encoder->InitEncode(settings, 1, 0), WEBRTC_VIDEO_CODEC_ERROR);
}
TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsOKOnSuccess) {
webrtc::VideoEncoder *encoder = GetObjCEncoder(CreateOKEncoderFactory());
CVPixelBufferRef pixel_buffer;
CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]);
webrtc::VideoFrame frame(buffer, webrtc::kVideoRotation_0, 0);
webrtc::CodecSpecificInfo info;
info.codecType = webrtc::kVideoCodecH264;
info.codec_name = "H264";
std::vector<webrtc::FrameType> frame_types;
EXPECT_EQ(encoder->Encode(frame, &info, &frame_types), WEBRTC_VIDEO_CODEC_OK);
}
TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsErrorOnFail) {
webrtc::VideoEncoder *encoder = GetObjCEncoder(CreateErrorEncoderFactory());
CVPixelBufferRef pixel_buffer;
CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]);
webrtc::VideoFrame frame(buffer, webrtc::kVideoRotation_0, 0);
webrtc::CodecSpecificInfo info;
info.codecType = webrtc::kVideoCodecH264;
info.codec_name = "H264";
std::vector<webrtc::FrameType> frame_types;
EXPECT_EQ(encoder->Encode(frame, &info, &frame_types), WEBRTC_VIDEO_CODEC_ERROR);
}
TEST(ObjCVideoEncoderFactoryTest, ReleaseEncodeReturnsOKOnSuccess) {
webrtc::VideoEncoder *encoder = GetObjCEncoder(CreateOKEncoderFactory());
EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_OK);
}
TEST(ObjCVideoEncoderFactoryTest, ReleaseEncodeReturnsErrorOnFail) {
webrtc::VideoEncoder *encoder = GetObjCEncoder(CreateErrorEncoderFactory());
EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_ERROR);
}
TEST(ObjCVideoEncoderFactoryTest, SetChannelParametersAlwaysReturnsOK) {
webrtc::VideoEncoder *encoder = GetObjCEncoder(CreateErrorEncoderFactory());
EXPECT_EQ(encoder->SetChannelParameters(1, 1), WEBRTC_VIDEO_CODEC_OK);
}
TEST(ObjCVideoEncoderFactoryTest, SetRatesReturnsOKOnSuccess) {
webrtc::VideoEncoder *encoder = GetObjCEncoder(CreateOKEncoderFactory());
EXPECT_EQ(encoder->SetRates(0, 0), WEBRTC_VIDEO_CODEC_OK);
}
TEST(ObjCVideoEncoderFactoryTest, SetRatesReturnsErrorOnFail) {
webrtc::VideoEncoder *encoder = GetObjCEncoder(CreateErrorEncoderFactory());
EXPECT_EQ(encoder->SetRates(0, 0), WEBRTC_VIDEO_CODEC_ERROR);
}