Wrap WebRTC OBJC API types with RTC_OBJC_TYPE.

This CL introduces 2 new macros that affect the WebRTC OBJC API symbols:

- RTC_OBJC_TYPE_PREFIX:
  Macro used to prepend a prefix to the API types that are exported with
  RTC_OBJC_EXPORT.

  Clients can patch the definition of this macro locally and build
  WebRTC.framework with their own prefix in case symbol clashing is a
  problem.

  This macro must only be defined by changing the value in
  sdk/objc/base/RTCMacros.h and not via a compiler flag, to ensure
  it has a unique value.

- RTC_OBJC_TYPE:
  Macro used internally to reference API types. Declaring an API type
  without using this macro will not include the declared type in the
  set of types that will be affected by the configurable
  RTC_OBJC_TYPE_PREFIX.

Manual changes:
https://webrtc-review.googlesource.com/c/src/+/173781/5..10

The auto-generated changes in PS#5 have been done with:
https://webrtc-review.googlesource.com/c/src/+/174061.

Bug: None
Change-Id: I0d54ca94db764fb3b6cb4365873f79e14cd879b8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173781
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31153}
This commit is contained in:
Mirko Bonadei
2020-05-04 16:14:32 +02:00
committed by Commit Bot
parent ce1320cc4d
commit a81e9c82fc
303 changed files with 2534 additions and 2189 deletions

View File

@ -25,7 +25,7 @@
#include "rtc_base/ref_counted_object.h"
#include "sdk/objc/native/api/video_frame.h"
typedef void (^VideoSinkCallback)(RTCVideoFrame *);
typedef void (^VideoSinkCallback)(RTC_OBJC_TYPE(RTCVideoFrame) *);
namespace {
@ -63,10 +63,13 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTCVideoFrame *frame =
[[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
const rtc::VideoSinkWants video_sink_wants;
@ -92,10 +95,13 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTCVideoFrame *frame =
[[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
const rtc::VideoSinkWants video_sink_wants;
@ -119,11 +125,13 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTCVideoFrame *frame = [[RTCVideoFrame alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
const rtc::VideoSinkWants video_sink_wants;
@ -159,16 +167,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
CVPixelBufferCreate(
NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTCVideoFrame *frame =
[[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(frame.width, outputFrame.width);
XCTAssertEqual(frame.height, outputFrame.height);
RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(buffer.cropX, outputBuffer.cropX);
XCTAssertEqual(buffer.cropY, outputBuffer.cropY);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
@ -192,16 +203,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTCVideoFrame *frame =
[[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 0);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
@ -225,16 +239,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
CVPixelBufferCreate(
NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTCVideoFrame *frame =
[[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 10);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
@ -259,22 +276,25 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
// Create a frame that's already adapted down.
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:640
adaptedHeight:360
cropWidth:720
cropHeight:1280
cropX:0
cropY:0];
RTCVideoFrame *frame =
[[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:640
adaptedHeight:360
cropWidth:720
cropHeight:1280
cropX:0
cropY:0];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 480);
XCTAssertEqual(outputFrame.height, 270);
RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 0);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(outputBuffer.cropWidth, 640);
@ -300,22 +320,25 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
CVPixelBufferCreate(
NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:370
adaptedHeight:640
cropWidth:370
cropHeight:640
cropX:10
cropY:0];
RTCVideoFrame *frame =
[[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:370
adaptedHeight:640
cropWidth:370
cropHeight:640
cropX:10
cropY:0];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 14);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(outputBuffer.cropWidth, 360);
@ -341,22 +364,25 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
CVPixelBufferCreate(
NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:300
adaptedHeight:640
cropWidth:300
cropHeight:640
cropX:40
cropY:0];
RTCVideoFrame *frame =
[[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:300
adaptedHeight:640
cropWidth:300
cropHeight:640
cropX:40
cropY:0];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 300);
XCTAssertEqual(outputFrame.height, 534);
RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 40);
XCTAssertEqual(outputBuffer.cropY, 52);
XCTAssertEqual(outputBuffer.cropWidth, 300);
@ -379,16 +405,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
- (void)testOnCapturedFrameI420BufferNeedsAdaptation {
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer];
RTCVideoFrame *frame =
[[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
RTC_OBJC_TYPE(RTCI420Buffer) *buffer =
[[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer;
RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
@ -408,16 +437,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
- (void)testOnCapturedFrameI420BufferNeedsCropping {
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(380, 640);
RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer];
RTCVideoFrame *frame =
[[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
RTC_OBJC_TYPE(RTCI420Buffer) *buffer =
[[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer;
RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
XCTAssertGreaterThanOrEqual(psnr, 40);

View File

@ -21,7 +21,7 @@
std::unique_ptr<webrtc::ios_adm::AudioDeviceIOS> _audio_device;
}
@property(nonatomic) RTCAudioSession *audioSession;
@property(nonatomic) RTC_OBJC_TYPE(RTCAudioSession) * audioSession;
@end
@ -34,7 +34,7 @@
_audioDeviceModule = webrtc::CreateAudioDeviceModule();
_audio_device.reset(new webrtc::ios_adm::AudioDeviceIOS());
self.audioSession = [RTCAudioSession sharedInstance];
self.audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
NSError *error = nil;
[self.audioSession lockForConfiguration];
@ -61,21 +61,21 @@
// Verifies that the AudioDeviceIOS is_interrupted_ flag is reset correctly
// after an iOS AVAudioSessionInterruptionTypeEnded notification event.
// AudioDeviceIOS listens to RTCAudioSession interrupted notifications by:
// AudioDeviceIOS listens to RTC_OBJC_TYPE(RTCAudioSession) interrupted notifications by:
// - In AudioDeviceIOS.InitPlayOrRecord registers its audio_session_observer_
// callback with RTCAudioSession's delegate list.
// - When RTCAudioSession receives an iOS audio interrupted notification, it
// callback with RTC_OBJC_TYPE(RTCAudioSession)'s delegate list.
// - When RTC_OBJC_TYPE(RTCAudioSession) receives an iOS audio interrupted notification, it
// passes the notification to callbacks in its delegate list which sets
// AudioDeviceIOS's is_interrupted_ flag to true.
// - When AudioDeviceIOS.ShutdownPlayOrRecord is called, its
// audio_session_observer_ callback is removed from RTCAudioSessions's
// delegate list.
// So if RTCAudioSession receives an iOS end audio interruption notification,
// AudioDeviceIOS is not notified as its callback is not in RTCAudioSession's
// So if RTC_OBJC_TYPE(RTCAudioSession) receives an iOS end audio interruption notification,
// AudioDeviceIOS is not notified as its callback is not in RTC_OBJC_TYPE(RTCAudioSession)'s
// delegate list. This causes AudioDeviceIOS's is_interrupted_ flag to be in
// the wrong (true) state and the audio session will ignore audio changes.
// As RTCAudioSession keeps its own interrupted state, the fix is to initialize
// AudioDeviceIOS's is_interrupted_ flag to RTCAudioSession's isInterrupted
// As RTC_OBJC_TYPE(RTCAudioSession) keeps its own interrupted state, the fix is to initialize
// AudioDeviceIOS's is_interrupted_ flag to RTC_OBJC_TYPE(RTCAudioSession)'s isInterrupted
// flag in AudioDeviceIOS.InitPlayOrRecord.
- (void)testInterruptedAudioSession {
XCTAssertTrue(self.audioSession.isActive);

View File

@ -20,9 +20,11 @@
#import "components/audio/RTCAudioSession.h"
#import "components/audio/RTCAudioSessionConfiguration.h"
@interface RTCAudioSession (UnitTesting)
@interface RTC_OBJC_TYPE (RTCAudioSession)
(UnitTesting)
@property(nonatomic, readonly) std::vector<__weak id<RTCAudioSessionDelegate> > delegates;
@property(nonatomic,
readonly) std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
- (instancetype)initWithAudioSession:(id)audioSession;
@ -38,7 +40,7 @@
@synthesize outputVolume = _outputVolume;
@end
@interface RTCAudioSessionTestDelegate : NSObject <RTCAudioSessionDelegate>
@interface RTCAudioSessionTestDelegate : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
@property (nonatomic, readonly) float outputVolume;
@ -55,31 +57,31 @@
return self;
}
- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session {
- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
shouldResumeSession:(BOOL)shouldResumeSession {
}
- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
}
- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session {
- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionMediaServerReset:(RTCAudioSession *)session {
- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionShouldConfigure:(RTCAudioSession *)session {
- (void)audioSessionShouldConfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session {
- (void)audioSessionShouldUnconfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSession:(RTCAudioSession *)audioSession
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
didChangeOutputVolume:(float)outputVolume {
_outputVolume = outputVolume;
}
@ -95,14 +97,14 @@
- (instancetype)init {
if (self = [super init]) {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session addDelegate:self];
}
return self;
}
- (void)dealloc {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session removeDelegate:self];
}
@ -118,7 +120,7 @@
@implementation RTCAudioSessionTest
- (void)testLockForConfiguration {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
for (size_t i = 0; i < 2; i++) {
[session lockForConfiguration];
@ -132,7 +134,7 @@
}
- (void)testAddAndRemoveDelegates {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
NSMutableArray *delegates = [NSMutableArray array];
const size_t count = 5;
for (size_t i = 0; i < count; ++i) {
@ -151,7 +153,7 @@
}
- (void)testPushDelegate {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
NSMutableArray *delegates = [NSMutableArray array];
const size_t count = 2;
for (size_t i = 0; i < count; ++i) {
@ -184,7 +186,7 @@
// Tests that delegates added to the audio session properly zero out. This is
// checking an implementation detail (that vectors of __weak work as expected).
- (void)testZeroingWeakDelegate {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
@autoreleasepool {
// Add a delegate to the session. There should be one delegate at this
// point.
@ -212,12 +214,12 @@
[[RTCTestRemoveOnDeallocDelegate alloc] init];
EXPECT_TRUE(delegate);
}
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
EXPECT_EQ(0u, session.delegates.size());
}
- (void)testAudioSessionActivation {
RTCAudioSession *audioSession = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
EXPECT_EQ(0, audioSession.activationCount);
[audioSession audioSessionDidActivate:[AVAudioSession sharedInstance]];
EXPECT_EQ(1, audioSession.activationCount);
@ -255,10 +257,10 @@ OCMLocation *OCMMakeLocation(id testCase, const char *fileCString, int line){
setActive:YES withOptions:0 error:((NSError __autoreleasing **)[OCMArg anyPointer])]).
andDo(setActiveBlock);
id mockAudioSession = OCMPartialMock([RTCAudioSession sharedInstance]);
id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]);
OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession);
RTCAudioSession *audioSession = mockAudioSession;
RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession;
EXPECT_EQ(0, audioSession.activationCount);
[audioSession lockForConfiguration];
EXPECT_TRUE([audioSession checkLock:nil]);
@ -286,7 +288,8 @@ OCMLocation *OCMMakeLocation(id testCase, const char *fileCString, int line){
- (void)testAudioVolumeDidNotify {
MockAVAudioSession *mockAVAudioSession = [[MockAVAudioSession alloc] init];
RTCAudioSession *session = [[RTCAudioSession alloc] initWithAudioSession:mockAVAudioSession];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[[RTC_OBJC_TYPE(RTCAudioSession) alloc] initWithAudioSession:mockAVAudioSession];
RTCAudioSessionTestDelegate *delegate =
[[RTCAudioSessionTestDelegate alloc] init];
[session addDelegate:delegate];
@ -304,8 +307,8 @@ namespace webrtc {
class AudioSessionTest : public ::testing::Test {
protected:
void TearDown() override {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
for (id<RTCAudioSessionDelegate> delegate : session.delegates) {
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
for (id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> delegate : session.delegates) {
[session removeDelegate:delegate];
}
}

View File

@ -31,7 +31,8 @@
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertFalse([buffer requiresCropping]);
@ -42,13 +43,14 @@
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *croppedBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:720
adaptedHeight:1280
cropWidth:360
cropHeight:640
cropX:100
cropY:100];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *croppedBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:720
adaptedHeight:1280
cropWidth:360
cropHeight:640
cropX:100
cropY:100];
XCTAssertTrue([croppedBuffer requiresCropping]);
@ -60,7 +62,8 @@
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertFalse([buffer requiresScalingToWidth:720 height:1280]);
CVBufferRelease(pixelBufferRef);
@ -71,7 +74,8 @@
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertTrue([buffer requiresScalingToWidth:360 height:640]);
CVBufferRelease(pixelBufferRef);
@ -82,13 +86,14 @@
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:720
adaptedHeight:1280
cropWidth:360
cropHeight:640
cropX:100
cropY:100];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:720
adaptedHeight:1280
cropWidth:360
cropHeight:640
cropX:100
cropY:100];
XCTAssertFalse([buffer requiresScalingToWidth:360 height:640]);
CVBufferRelease(pixelBufferRef);
@ -99,7 +104,8 @@
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 576000);
CVBufferRelease(pixelBufferRef);
@ -109,7 +115,8 @@
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 0);
CVBufferRelease(pixelBufferRef);
@ -198,7 +205,8 @@
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertEqual(buffer.width, 720);
XCTAssertEqual(buffer.height, 1280);
@ -218,14 +226,14 @@
[buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:frameScaleBuffer.data()];
RTCCVPixelBuffer *scaledBuffer =
[[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef];
XCTAssertEqual(scaledBuffer.width, outputSize.width);
XCTAssertEqual(scaledBuffer.height, outputSize.height);
if (outputSize.width > 0 && outputSize.height > 0) {
RTCI420Buffer *originalBufferI420 = [buffer toI420];
RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420];
RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420];
RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420];
double psnr =
I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
@ -244,14 +252,14 @@
DrawGradientInRGBPixelBuffer(pixelBufferRef);
RTCCVPixelBuffer *buffer =
[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef)
adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef)
cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX
cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY
cropX:cropX
cropY:cropY];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:pixelBufferRef
adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef)
adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef)
cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX
cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY
cropX:cropX
cropY:cropY];
XCTAssertEqual(buffer.width, 720);
XCTAssertEqual(buffer.height, 1280);
@ -260,13 +268,13 @@
CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &outputPixelBufferRef);
[buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:NULL];
RTCCVPixelBuffer *scaledBuffer =
[[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef];
XCTAssertEqual(scaledBuffer.width, 360);
XCTAssertEqual(scaledBuffer.height, 640);
RTCI420Buffer *originalBufferI420 = [buffer toI420];
RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420];
RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420];
RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420];
double psnr =
I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
@ -282,8 +290,9 @@
CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
RTCI420Buffer *fromCVPixelBuffer = [buffer toI420];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCI420Buffer) *fromCVPixelBuffer = [buffer toI420];
double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]);
double target = webrtc::kPerfectPSNR;

View File

@ -14,7 +14,7 @@
@interface RTCCallbackLoggerTests : XCTestCase
@property(nonatomic, strong) RTCCallbackLogger *logger;
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCallbackLogger) * logger;
@end
@ -23,7 +23,7 @@
@synthesize logger;
- (void)setUp {
self.logger = [[RTCCallbackLogger alloc] init];
self.logger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
}
- (void)tearDown {

View File

@ -59,9 +59,11 @@ CMSampleBufferRef createTestSampleBufferRef() {
}
#endif
@interface RTCCameraVideoCapturer (Tests)<AVCaptureVideoDataOutputSampleBufferDelegate>
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate
captureSession:(AVCaptureSession *)captureSession;
@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
(Tests)<AVCaptureVideoDataOutputSampleBufferDelegate> -
(instancetype)initWithDelegate
: (__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate captureSession
: (AVCaptureSession *)captureSession;
@end
@interface RTCCameraVideoCapturerTests : NSObject
@ -69,7 +71,7 @@ CMSampleBufferRef createTestSampleBufferRef() {
@property(nonatomic, strong) id deviceMock;
@property(nonatomic, strong) id captureConnectionMock;
@property(nonatomic, strong) id captureSessionMock;
@property(nonatomic, strong) RTCCameraVideoCapturer *capturer;
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
@end
@implementation RTCCameraVideoCapturerTests
@ -80,9 +82,10 @@ CMSampleBufferRef createTestSampleBufferRef() {
@synthesize capturer = _capturer;
- (void)setup {
self.delegateMock = OCMProtocolMock(@protocol(RTCVideoCapturerDelegate));
self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.delegateMock];
self.capturer =
[[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock];
self.deviceMock = [self createDeviceMock];
}
@ -94,10 +97,11 @@ CMSampleBufferRef createTestSampleBufferRef() {
OCMStub([self.captureSessionMock addOutput:[OCMArg any]]);
OCMStub([self.captureSessionMock beginConfiguration]);
OCMStub([self.captureSessionMock commitConfiguration]);
self.delegateMock = OCMProtocolMock(@protocol(RTCVideoCapturerDelegate));
self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.delegateMock
captureSession:self.captureSessionMock];
self.capturer =
[[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock
captureSession:self.captureSessionMock];
self.deviceMock = [self createDeviceMock];
}
@ -160,7 +164,8 @@ CMSampleBufferRef createTestSampleBufferRef() {
OCMStub([self.deviceMock formats]).andReturn(formats);
// when
NSArray *supportedFormats = [RTCCameraVideoCapturer supportedFormatsForDevice:self.deviceMock];
NSArray *supportedFormats =
[RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:self.deviceMock];
// then
EXPECT_EQ(supportedFormats.count, 3u);
@ -199,7 +204,8 @@ CMSampleBufferRef createTestSampleBufferRef() {
// then
[[self.delegateMock expect] capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) {
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
expectedFrame) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_270);
return YES;
}]];
@ -240,22 +246,23 @@ CMSampleBufferRef createTestSampleBufferRef() {
CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
[[self.delegateMock expect] capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) {
if (camera == AVCaptureDevicePositionFront) {
if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
} else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
}
} else if (camera == AVCaptureDevicePositionBack) {
if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
} else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
}
}
return YES;
}]];
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
expectedFrame) {
if (camera == AVCaptureDevicePositionFront) {
if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
} else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
}
} else if (camera == AVCaptureDevicePositionBack) {
if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
} else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
}
}
return YES;
}]];
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
@ -298,12 +305,13 @@ CMSampleBufferRef createTestSampleBufferRef() {
CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
[[self.delegateMock expect] capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) {
// Front camera and landscape left should return 180. But the frame says its from the back
// camera, so rotation should be 0.
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
return YES;
}]];
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
expectedFrame) {
// Front camera and landscape left should return 180. But the frame says its
// from the back camera, so rotation should be 0.
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
return YES;
}]];
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];

View File

@ -29,38 +29,39 @@
@implementation RTCCertificateTest
- (void)testCertificateIsUsedInConfig {
RTCConfiguration *originalConfig = [[RTCConfiguration alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *originalConfig = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
originalConfig.iceServers = @[ server ];
// Generate a new certificate.
RTCCertificate *originalCertificate = [RTCCertificate generateCertificateWithParams:@{
@"expires" : @100000,
@"name" : @"RSASSA-PKCS1-v1_5"
}];
RTC_OBJC_TYPE(RTCCertificate) *originalCertificate = [RTC_OBJC_TYPE(RTCCertificate)
generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
// Store certificate in configuration.
originalConfig.certificate = originalCertificate;
RTCMediaConstraints *contraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
// Create PeerConnection with this certificate.
RTCPeerConnection *peerConnection =
RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
[factory peerConnectionWithConfiguration:originalConfig constraints:contraints delegate:nil];
// Retrieve certificate from the configuration.
RTCConfiguration *retrievedConfig = peerConnection.configuration;
RTC_OBJC_TYPE(RTCConfiguration) *retrievedConfig = peerConnection.configuration;
// Extract PEM strings from original certificate.
std::string originalPrivateKeyField = [[originalCertificate private_key] UTF8String];
std::string originalCertificateField = [[originalCertificate certificate] UTF8String];
// Extract PEM strings from certificate retrieved from configuration.
RTCCertificate *retrievedCertificate = retrievedConfig.certificate;
RTC_OBJC_TYPE(RTCCertificate) *retrievedCertificate = retrievedConfig.certificate;
std::string retrievedPrivateKeyField = [[retrievedCertificate private_key] UTF8String];
std::string retrievedCertificateField = [[retrievedCertificate certificate] UTF8String];

View File

@ -28,9 +28,10 @@
- (void)testConversionToNativeConfiguration {
NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.iceServers = @[ server ];
config.iceTransportPolicy = RTCIceTransportPolicyRelay;
config.bundlePolicy = RTCBundlePolicyMaxBundle;
@ -47,10 +48,11 @@
config.continualGatheringPolicy =
RTCContinualGatheringPolicyGatherContinually;
config.shouldPruneTurnPorts = YES;
config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:YES
srtpEnableEncryptedRtpHeaderExtensions:YES
sframeRequireFrameEncryption:YES];
config.cryptoOptions =
[[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:YES
srtpEnableEncryptedRtpHeaderExtensions:YES
sframeRequireFrameEncryption:YES];
config.rtcpAudioReportIntervalMs = 2500;
config.rtcpVideoReportIntervalMs = 3750;
@ -89,9 +91,10 @@
- (void)testNativeConversionToConfiguration {
NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.iceServers = @[ server ];
config.iceTransportPolicy = RTCIceTransportPolicyRelay;
config.bundlePolicy = RTCBundlePolicyMaxBundle;
@ -108,20 +111,21 @@
config.continualGatheringPolicy =
RTCContinualGatheringPolicyGatherContinually;
config.shouldPruneTurnPorts = YES;
config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:NO
srtpEnableEncryptedRtpHeaderExtensions:NO
sframeRequireFrameEncryption:NO];
config.cryptoOptions =
[[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:NO
srtpEnableEncryptedRtpHeaderExtensions:NO
sframeRequireFrameEncryption:NO];
config.rtcpAudioReportIntervalMs = 1500;
config.rtcpVideoReportIntervalMs = 2150;
webrtc::PeerConnectionInterface::RTCConfiguration *nativeConfig =
[config createNativeConfiguration];
RTCConfiguration *newConfig = [[RTCConfiguration alloc]
initWithNativeConfiguration:*nativeConfig];
RTC_OBJC_TYPE(RTCConfiguration) *newConfig =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:*nativeConfig];
EXPECT_EQ([config.iceServers count], newConfig.iceServers.count);
RTCIceServer *newServer = newConfig.iceServers[0];
RTCIceServer *origServer = config.iceServers[0];
RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0];
RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0];
EXPECT_EQ(origServer.urlStrings.count, server.urlStrings.count);
std::string origUrl = origServer.urlStrings.firstObject.UTF8String;
std::string url = newServer.urlStrings.firstObject.UTF8String;
@ -152,7 +156,7 @@
}
- (void)testDefaultValues {
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
EXPECT_EQ(config.cryptoOptions, nil);
}

View File

@ -30,8 +30,8 @@
int channelId = 4;
NSString *protocol = @"protocol";
RTCDataChannelConfiguration *dataChannelConfig =
[[RTCDataChannelConfiguration alloc] init];
RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig =
[[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
dataChannelConfig.isOrdered = isOrdered;
dataChannelConfig.maxPacketLifeTime = maxPacketLifeTime;
dataChannelConfig.maxRetransmits = maxRetransmits;
@ -50,7 +50,7 @@
@end
TEST(RTCDataChannelConfiguration, NativeDataChannelInitConversionTest) {
TEST(RTC_OBJC_TYPE(RTCDataChannelConfiguration), NativeDataChannelInitConversionTest) {
@autoreleasepool {
RTCDataChannelConfigurationTest *test =
[[RTCDataChannelConfigurationTest alloc] init];

View File

@ -22,15 +22,15 @@
webrtc::EncodedImage encoded_image;
encoded_image.SetEncodedData(encoded_data);
RTCEncodedImage *encodedImage =
[[RTCEncodedImage alloc] initWithNativeEncodedImage:encoded_image];
RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
[[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image];
XCTAssertEqual([encodedImage nativeEncodedImage].GetEncodedData(), encoded_data);
}
- (void)testInitWithNSData {
NSData *bufferData = [NSData data];
RTCEncodedImage *encodedImage = [[RTCEncodedImage alloc] init];
RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
encodedImage.buffer = bufferData;
webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage];
@ -39,12 +39,13 @@
}
- (void)testRetainsNativeEncodedImage {
RTCEncodedImage *encodedImage;
RTC_OBJC_TYPE(RTCEncodedImage) * encodedImage;
{
const auto encoded_data = webrtc::EncodedImageBuffer::Create();
webrtc::EncodedImage encoded_image;
encoded_image.SetEncodedData(encoded_data);
encodedImage = [[RTCEncodedImage alloc] initWithNativeEncodedImage:encoded_image];
encodedImage =
[[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image];
}
webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage];
XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr);

View File

@ -17,7 +17,7 @@
NSString *const kTestFileName = @"foreman.mp4";
static const int kTestTimeoutMs = 5 * 1000; // 5secs.
@interface MockCapturerDelegate : NSObject <RTCVideoCapturerDelegate>
@interface MockCapturerDelegate : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
@property(nonatomic, assign) NSInteger capturedFramesCount;
@ -26,7 +26,8 @@ static const int kTestTimeoutMs = 5 * 1000; // 5secs.
@implementation MockCapturerDelegate
@synthesize capturedFramesCount = _capturedFramesCount;
- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
self.capturedFramesCount++;
}
@ -35,7 +36,7 @@ static const int kTestTimeoutMs = 5 * 1000; // 5secs.
NS_CLASS_AVAILABLE_IOS(10)
@interface RTCFileVideoCapturerTests : XCTestCase
@property(nonatomic, strong) RTCFileVideoCapturer *capturer;
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * capturer;
@property(nonatomic, strong) MockCapturerDelegate *mockDelegate;
@end
@ -46,7 +47,7 @@ NS_CLASS_AVAILABLE_IOS(10)
- (void)setUp {
self.mockDelegate = [[MockCapturerDelegate alloc] init];
self.capturer = [[RTCFileVideoCapturer alloc] initWithDelegate:self.mockDelegate];
self.capturer = [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:self.mockDelegate];
}
- (void)tearDown {

View File

@ -22,24 +22,26 @@ static NSString *level31ConstrainedBaseline = @"42e01f";
@implementation RTCH264ProfileLevelIdTests
- (void)testInitWithString {
RTCH264ProfileLevelId *profileLevelId =
[[RTCH264ProfileLevelId alloc] initWithHexString:level31ConstrainedHigh];
RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedHigh];
XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedHigh);
XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
profileLevelId = [[RTCH264ProfileLevelId alloc] initWithHexString:level31ConstrainedBaseline];
profileLevelId =
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedBaseline];
XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedBaseline);
XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
}
- (void)testInitWithProfileAndLevel {
RTCH264ProfileLevelId *profileLevelId =
[[RTCH264ProfileLevelId alloc] initWithProfile:RTCH264ProfileConstrainedHigh
level:RTCH264Level3_1];
RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithProfile:RTCH264ProfileConstrainedHigh
level:RTCH264Level3_1];
XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedHigh);
profileLevelId = [[RTCH264ProfileLevelId alloc] initWithProfile:RTCH264ProfileConstrainedBaseline
level:RTCH264Level3_1];
profileLevelId = [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
initWithProfile:RTCH264ProfileConstrainedBaseline
level:RTCH264Level3_1];
XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedBaseline);
}

View File

@ -30,9 +30,8 @@
"fdff:2642:12a6:fe38:c001:beda:fcf9:51aa "
"59052 typ host generation 0";
RTCIceCandidate *candidate = [[RTCIceCandidate alloc] initWithSdp:sdp
sdpMLineIndex:0
sdpMid:@"audio"];
RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
[[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp sdpMLineIndex:0 sdpMid:@"audio"];
std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate =
candidate.nativeCandidate;
@ -51,8 +50,8 @@
webrtc::IceCandidateInterface *nativeCandidate =
webrtc::CreateIceCandidate("audio", 0, sdp, nullptr);
RTCIceCandidate *iceCandidate =
[[RTCIceCandidate alloc] initWithNativeCandidate:nativeCandidate];
RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate =
[[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:nativeCandidate];
EXPECT_TRUE([@"audio" isEqualToString:iceCandidate.sdpMid]);
EXPECT_EQ(0, iceCandidate.sdpMLineIndex);

View File

@ -28,8 +28,8 @@
@implementation RTCIceServerTest
- (void)testOneURLServer {
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[
@"stun:stun1.example.net" ]];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"stun:stun1.example.net" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
@ -39,8 +39,8 @@
}
- (void)testTwoURLServer {
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[
@"turn1:turn1.example.net", @"turn2:turn2.example.net" ]];
RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
initWithURLStrings:@[ @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(2u, iceStruct.urls.size());
@ -51,10 +51,10 @@
}
- (void)testPasswordCredential {
RTCIceServer *server = [[RTCIceServer alloc]
initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@ -63,11 +63,12 @@
}
- (void)testHostname {
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@ -77,12 +78,13 @@
}
- (void)testTlsAlpnProtocols {
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"
tlsAlpnProtocols:@[ @"proto1", @"proto2" ]];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"
tlsAlpnProtocols:@[ @"proto1", @"proto2" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@ -93,13 +95,14 @@
}
- (void)testTlsEllipticCurves {
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"
tlsAlpnProtocols:@[ @"proto1", @"proto2" ]
tlsEllipticCurves:@[ @"curve1", @"curve2" ]];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"
tlsAlpnProtocols:@[ @"proto1", @"proto2" ]
tlsEllipticCurves:@[ @"curve1", @"curve2" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@ -121,8 +124,8 @@
nativeServer.tls_elliptic_curves.push_back("curve1");
nativeServer.tls_elliptic_curves.push_back("curve2");
RTCIceServer *iceServer =
[[RTCIceServer alloc] initWithNativeServer:nativeServer];
RTC_OBJC_TYPE(RTCIceServer) *iceServer =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:nativeServer];
EXPECT_EQ(1u, iceServer.urlStrings.count);
EXPECT_EQ("stun:stun.example.net",
[NSString stdStringForString:iceServer.urlStrings.firstObject]);

View File

@ -21,10 +21,11 @@
#import "components/renderer/metal/RTCMTLNV12Renderer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
// Extension of RTCMTLVideoView for testing purposes.
@interface RTCMTLVideoView (Testing)
// Extension of RTC_OBJC_TYPE(RTCMTLVideoView) for testing purposes.
@interface RTC_OBJC_TYPE (RTCMTLVideoView)
(Testing)
@property(nonatomic, readonly) MTKView *metalView;
@property(nonatomic, readonly) MTKView *metalView;
+ (BOOL)isMetalAvailable;
+ (UIView *)createMetalView:(CGRect)frame;
@ -48,7 +49,7 @@
@synthesize frameMock = _frameMock;
- (void)setUp {
self.classMock = OCMClassMock([RTCMTLVideoView class]);
self.classMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLVideoView) class]);
[self startMockingNilView];
}
@ -64,15 +65,16 @@
}
- (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer {
id frameMock = OCMClassMock([RTCVideoFrame class]);
id frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]);
if (hasCVPixelBuffer) {
CVPixelBufferRef pixelBufferRef;
CVPixelBufferCreate(
kCFAllocatorDefault, 200, 200, kCVPixelFormatType_420YpCbCr8Planar, nil, &pixelBufferRef);
OCMStub([frameMock buffer])
.andReturn([[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]);
.andReturn([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]);
} else {
OCMStub([frameMock buffer]).andReturn([[RTCI420Buffer alloc] initWithWidth:200 height:200]);
OCMStub([frameMock buffer])
.andReturn([[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithWidth:200 height:200]);
}
OCMStub([frameMock timeStampNs]).andReturn(arc4random_uniform(INT_MAX));
return frameMock;
@ -98,7 +100,8 @@
// when
BOOL asserts = NO;
@try {
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
(void)realView;
} @catch (NSException *ex) {
asserts = YES;
@ -111,8 +114,9 @@
// given
OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
self.frameMock = OCMClassMock([RTCVideoFrame class]);
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
self.frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]);
[[self.frameMock reject] buffer];
[[self.classMock reject] createNV12Renderer];
@ -137,7 +141,8 @@
OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock);
[[self.classMock reject] createNV12Renderer];
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
// when
[realView renderFrame:self.frameMock];
@ -158,7 +163,8 @@
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
[[self.classMock reject] createI420Renderer];
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
// when
[realView renderFrame:self.frameMock];
@ -178,7 +184,8 @@
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
[[self.classMock reject] createI420Renderer];
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
[realView renderFrame:self.frameMock];
[realView drawInMTKView:realView.metalView];
@ -186,7 +193,7 @@
[self.classMock verify];
// Recreate view.
realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
// View hould reinit renderer.
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
@ -206,7 +213,8 @@
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
[[self.classMock reject] createI420Renderer];
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
[realView renderFrame:self.frameMock];
[realView drawInMTKView:realView.metalView];
@ -230,7 +238,8 @@
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
[[self.classMock reject] createI420Renderer];
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
[realView renderFrame:self.frameMock];
[realView drawInMTKView:realView.metalView];
@ -250,11 +259,12 @@
- (void)testReportsSizeChangesToDelegate {
OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
id delegateMock = OCMProtocolMock(@protocol(RTCVideoViewDelegate));
id delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoViewDelegate)));
CGSize size = CGSizeMake(640, 480);
OCMExpect([delegateMock videoView:[OCMArg any] didChangeVideoSize:size]);
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
realView.delegate = delegateMock;
[realView setSize:size];
@ -269,7 +279,7 @@
createMetalView:CGRectZero];
OCMExpect([metalKitView setContentMode:UIViewContentModeScaleAspectFill]);
RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] init];
[realView setVideoContentMode:UIViewContentModeScaleAspectFill];
OCMVerify(metalKitView);

View File

@ -28,9 +28,9 @@
NSDictionary *mandatory = @{@"key1": @"value1", @"key2": @"value2"};
NSDictionary *optional = @{@"key3": @"value3", @"key4": @"value4"};
RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:mandatory
optionalConstraints:optional];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatory
optionalConstraints:optional];
std::unique_ptr<webrtc::MediaConstraints> nativeConstraints =
[constraints nativeConstraints];

View File

@ -43,10 +43,12 @@
- (void)testNV12TextureCacheDoesNotCrashOnEmptyFrame {
CVPixelBufferRef nullPixelBuffer = NULL;
RTCCVPixelBuffer *badFrameBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:nullPixelBuffer];
RTCVideoFrame *badFrame = [[RTCVideoFrame alloc] initWithBuffer:badFrameBuffer
rotation:RTCVideoRotation_0
timeStampNs:0];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *badFrameBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:nullPixelBuffer];
RTC_OBJC_TYPE(RTCVideoFrame) *badFrame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:badFrameBuffer
rotation:RTCVideoRotation_0
timeStampNs:0];
[_nv12TextureCache uploadFrameToTextures:badFrame];
}

View File

@ -39,7 +39,7 @@ extern "C" {
@implementation RTCPeerConnectionFactoryBuilderTest
- (void)testBuilder {
id factoryMock = OCMStrictClassMock([RTCPeerConnectionFactory class]);
id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
OCMExpect([factoryMock alloc]).andReturn(factoryMock);
#ifdef HAVE_NO_MEDIA
RTC_UNUSED([[[factoryMock expect] andReturn:factoryMock] initWithNoMedia]);
@ -54,13 +54,14 @@ extern "C" {
mediaTransportFactory:nullptr]);
#endif
RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
RTCPeerConnectionFactory* peerConnectionFactory = [builder createPeerConnectionFactory];
RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
[builder createPeerConnectionFactory];
EXPECT_TRUE(peerConnectionFactory != nil);
OCMVerifyAll(factoryMock);
}
- (void)testDefaultComponentsBuilder {
id factoryMock = OCMStrictClassMock([RTCPeerConnectionFactory class]);
id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
OCMExpect([factoryMock alloc]).andReturn(factoryMock);
#ifdef HAVE_NO_MEDIA
RTC_UNUSED([[[factoryMock expect] andReturn:factoryMock] initWithNoMedia]);
@ -75,7 +76,8 @@ extern "C" {
mediaTransportFactory:nullptr]);
#endif
RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder];
RTCPeerConnectionFactory* peerConnectionFactory = [builder createPeerConnectionFactory];
RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
[builder createPeerConnectionFactory];
EXPECT_TRUE(peerConnectionFactory != nil);
OCMVerifyAll(factoryMock);
}

View File

@ -30,16 +30,17 @@
- (void)testPeerConnectionLifetime {
@autoreleasepool {
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTCMediaConstraints *constraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTCPeerConnectionFactory *factory;
RTCPeerConnection *peerConnection;
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
@autoreleasepool {
factory = [[RTCPeerConnectionFactory alloc] init];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
peerConnection =
[factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
[peerConnection close];
@ -53,11 +54,11 @@
- (void)testMediaStreamLifetime {
@autoreleasepool {
RTCPeerConnectionFactory *factory;
RTCMediaStream *mediaStream;
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCMediaStream) * mediaStream;
@autoreleasepool {
factory = [[RTCPeerConnectionFactory alloc] init];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
mediaStream = [factory mediaStreamWithStreamId:@"mediaStream"];
factory = nil;
}
@ -69,17 +70,19 @@
- (void)testDataChannelLifetime {
@autoreleasepool {
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTCMediaConstraints *constraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
RTCDataChannelConfiguration *dataChannelConfig = [[RTCDataChannelConfiguration alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig =
[[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
RTCPeerConnectionFactory *factory;
RTCPeerConnection *peerConnection;
RTCDataChannel *dataChannel;
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
RTC_OBJC_TYPE(RTCDataChannel) * dataChannel;
@autoreleasepool {
factory = [[RTCPeerConnectionFactory alloc] init];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
peerConnection =
[factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
dataChannel =
@ -97,18 +100,20 @@
- (void)testRTCRtpTransceiverLifetime {
@autoreleasepool {
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
RTCMediaConstraints *contraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
RTCRtpTransceiverInit *init = [[RTCRtpTransceiverInit alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
RTCPeerConnectionFactory *factory;
RTCPeerConnection *peerConnection;
RTCRtpTransceiver *tranceiver;
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
RTC_OBJC_TYPE(RTCRtpTransceiver) * tranceiver;
@autoreleasepool {
factory = [[RTCPeerConnectionFactory alloc] init];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
peerConnection =
[factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeAudio init:init];
@ -125,16 +130,17 @@
- (void)testRTCRtpSenderLifetime {
@autoreleasepool {
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTCMediaConstraints *constraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTCPeerConnectionFactory *factory;
RTCPeerConnection *peerConnection;
RTCRtpSender *sender;
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
RTC_OBJC_TYPE(RTCRtpSender) * sender;
@autoreleasepool {
factory = [[RTCPeerConnectionFactory alloc] init];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
peerConnection =
[factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo streamId:@"stream"];
@ -151,19 +157,20 @@
- (void)testRTCRtpReceiverLifetime {
@autoreleasepool {
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTCMediaConstraints *constraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTCPeerConnectionFactory *factory;
RTCPeerConnection *pc1;
RTCPeerConnection *pc2;
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCPeerConnection) * pc1;
RTC_OBJC_TYPE(RTCPeerConnection) * pc2;
NSArray<RTCRtpReceiver *> *receivers1;
NSArray<RTCRtpReceiver *> *receivers2;
NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers1;
NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers2;
@autoreleasepool {
factory = [[RTCPeerConnectionFactory alloc] init];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
[pc1 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"];
@ -197,11 +204,11 @@
- (void)testAudioSourceLifetime {
@autoreleasepool {
RTCPeerConnectionFactory *factory;
RTCAudioSource *audioSource;
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCAudioSource) * audioSource;
@autoreleasepool {
factory = [[RTCPeerConnectionFactory alloc] init];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
audioSource = [factory audioSourceWithConstraints:nil];
XCTAssertNotNil(audioSource);
factory = nil;
@ -214,11 +221,11 @@
- (void)testVideoSourceLifetime {
@autoreleasepool {
RTCPeerConnectionFactory *factory;
RTCVideoSource *videoSource;
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCVideoSource) * videoSource;
@autoreleasepool {
factory = [[RTCPeerConnectionFactory alloc] init];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
videoSource = [factory videoSource];
XCTAssertNotNil(videoSource);
factory = nil;
@ -231,11 +238,11 @@
- (void)testAudioTrackLifetime {
@autoreleasepool {
RTCPeerConnectionFactory *factory;
RTCAudioTrack *audioTrack;
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCAudioTrack) * audioTrack;
@autoreleasepool {
factory = [[RTCPeerConnectionFactory alloc] init];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
audioTrack = [factory audioTrackWithTrackId:@"audioTrack"];
XCTAssertNotNil(audioTrack);
factory = nil;
@ -248,11 +255,11 @@
- (void)testVideoTrackLifetime {
@autoreleasepool {
RTCPeerConnectionFactory *factory;
RTCVideoTrack *videoTrack;
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCVideoTrack) * videoTrack;
@autoreleasepool {
factory = [[RTCPeerConnectionFactory alloc] init];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
videoTrack = [factory videoTrackWithSource:[factory videoSource] trackId:@"videoTrack"];
XCTAssertNotNil(videoTrack);
factory = nil;
@ -263,20 +270,20 @@
XCTAssertTrue(true, "Expect test does not crash");
}
- (bool)negotiatePeerConnection:(RTCPeerConnection *)pc1
withPeerConnection:(RTCPeerConnection *)pc2
- (bool)negotiatePeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc1
withPeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc2
negotiationTimeout:(NSTimeInterval)timeout {
__weak RTCPeerConnection *weakPC1 = pc1;
__weak RTCPeerConnection *weakPC2 = pc2;
RTCMediaConstraints *sdpConstraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{
__weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1;
__weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC2 = pc2;
RTC_OBJC_TYPE(RTCMediaConstraints) *sdpConstraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
}
optionalConstraints:nil];
optionalConstraints:nil];
dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
[weakPC1 offerForConstraints:sdpConstraints
completionHandler:^(RTCSessionDescription *offer, NSError *error) {
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) {
XCTAssertNil(error);
XCTAssertNotNil(offer);
[weakPC1
@ -289,8 +296,9 @@
XCTAssertNil(error);
[weakPC2
answerForConstraints:sdpConstraints
completionHandler:^(RTCSessionDescription *answer,
NSError *error) {
completionHandler:^(
RTC_OBJC_TYPE(RTCSessionDescription) * answer,
NSError * error) {
XCTAssertNil(error);
XCTAssertNotNil(answer);
[weakPC2

View File

@ -34,9 +34,10 @@
- (void)testConfigurationGetter {
NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.iceServers = @[ server ];
config.iceTransportPolicy = RTCIceTransportPolicyRelay;
config.bundlePolicy = RTCBundlePolicyMaxBundle;
@ -54,18 +55,21 @@
RTCContinualGatheringPolicyGatherContinually;
config.shouldPruneTurnPorts = YES;
config.activeResetSrtpParams = YES;
config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:YES
srtpEnableEncryptedRtpHeaderExtensions:NO
sframeRequireFrameEncryption:NO];
config.cryptoOptions =
[[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:YES
srtpEnableEncryptedRtpHeaderExtensions:NO
sframeRequireFrameEncryption:NO];
RTCMediaConstraints *contraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
RTCConfiguration *newConfig;
RTC_OBJC_TYPE(RTCConfiguration) * newConfig;
@autoreleasepool {
RTCPeerConnection *peerConnection =
RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
[factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
newConfig = peerConnection.configuration;
@ -78,8 +82,8 @@
}
EXPECT_EQ([config.iceServers count], [newConfig.iceServers count]);
RTCIceServer *newServer = newConfig.iceServers[0];
RTCIceServer *origServer = config.iceServers[0];
RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0];
RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0];
std::string origUrl = origServer.urlStrings.firstObject.UTF8String;
std::string url = newServer.urlStrings.firstObject.UTF8String;
EXPECT_EQ(origUrl, url);
@ -109,19 +113,22 @@
- (void)testWithDependencies {
NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.iceServers = @[ server ];
RTCMediaConstraints *contraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
RTCConfiguration *newConfig;
RTC_OBJC_TYPE(RTCConfiguration) * newConfig;
std::unique_ptr<webrtc::PeerConnectionDependencies> pc_dependencies =
std::make_unique<webrtc::PeerConnectionDependencies>(nullptr);
@autoreleasepool {
RTCPeerConnection *peerConnection =
RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
[factory peerConnectionWithDependencies:config
constraints:contraints
dependencies:std::move(pc_dependencies)

View File

@ -24,19 +24,18 @@
@implementation RTCSessionDescriptionTest
/**
* Test conversion of an Objective-C RTCSessionDescription to a native
* Test conversion of an Objective-C RTC_OBJC_TYPE(RTCSessionDescription) to a native
* SessionDescriptionInterface (based on the types and SDP strings being equal).
*/
- (void)testSessionDescriptionConversion {
RTCSessionDescription *description =
[[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer
sdp:[self sdp]];
RTC_OBJC_TYPE(RTCSessionDescription) *description =
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:[self sdp]];
webrtc::SessionDescriptionInterface *nativeDescription =
description.nativeDescription;
EXPECT_EQ(RTCSdpTypeAnswer,
[RTCSessionDescription typeForStdString:nativeDescription->type()]);
[RTC_OBJC_TYPE(RTCSessionDescription) typeForStdString:nativeDescription->type()]);
std::string sdp;
nativeDescription->ToString(&sdp);
@ -51,11 +50,10 @@
[self sdp].stdString,
nullptr);
RTCSessionDescription *description =
[[RTCSessionDescription alloc] initWithNativeDescription:
nativeDescription];
RTC_OBJC_TYPE(RTCSessionDescription) *description =
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:nativeDescription];
EXPECT_EQ(webrtc::SessionDescriptionInterface::kAnswer,
[RTCSessionDescription stdStringForType:description.type]);
[RTC_OBJC_TYPE(RTCSessionDescription) stdStringForType:description.type]);
EXPECT_TRUE([[self sdp] isEqualToString:description.sdp]);
}

View File

@ -13,6 +13,7 @@
#include "sdk/objc/native/src/objc_video_decoder_factory.h"
#import "base/RTCMacros.h"
#import "base/RTCVideoDecoder.h"
#import "base/RTCVideoDecoderFactory.h"
#include "media/base/codec.h"
@ -20,8 +21,8 @@
#include "modules/video_coding/include/video_error_codes.h"
#include "rtc_base/gunit.h"
id<RTCVideoDecoderFactory> CreateDecoderFactoryReturning(int return_code) {
id decoderMock = OCMProtocolMock(@protocol(RTCVideoDecoder));
id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateDecoderFactoryReturning(int return_code) {
id decoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoder)));
OCMStub([decoderMock startDecodeWithNumberOfCores:1]).andReturn(return_code);
OCMStub([decoderMock decode:[OCMArg any]
missingFrames:NO
@ -30,22 +31,24 @@ id<RTCVideoDecoderFactory> CreateDecoderFactoryReturning(int return_code) {
.andReturn(return_code);
OCMStub([decoderMock releaseDecoder]).andReturn(return_code);
id decoderFactoryMock = OCMProtocolMock(@protocol(RTCVideoDecoderFactory));
RTCVideoCodecInfo *supported = [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil];
id decoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoderFactory)));
RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
OCMStub([decoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
OCMStub([decoderFactoryMock createDecoder:[OCMArg any]]).andReturn(decoderMock);
return decoderFactoryMock;
}
id<RTCVideoDecoderFactory> CreateOKDecoderFactory() {
id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateOKDecoderFactory() {
return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK);
}
id<RTCVideoDecoderFactory> CreateErrorDecoderFactory() {
id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateErrorDecoderFactory() {
return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
}
std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(id<RTCVideoDecoderFactory> factory) {
std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(
id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> factory) {
webrtc::ObjCVideoDecoderFactory decoder_factory(factory);
return decoder_factory.CreateVideoDecoder(webrtc::SdpVideoFormat(cricket::kH264CodecName));
}

View File

@ -25,8 +25,8 @@
#include "rtc_base/gunit.h"
#include "sdk/objc/native/src/objc_frame_buffer.h"
id<RTCVideoEncoderFactory> CreateEncoderFactoryReturning(int return_code) {
id encoderMock = OCMProtocolMock(@protocol(RTCVideoEncoder));
id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateEncoderFactoryReturning(int return_code) {
id encoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoder)));
OCMStub([encoderMock startEncodeWithSettings:[OCMArg any] numberOfCores:1])
.andReturn(return_code);
OCMStub([encoderMock encode:[OCMArg any] codecSpecificInfo:[OCMArg any] frameTypes:[OCMArg any]])
@ -34,23 +34,25 @@ id<RTCVideoEncoderFactory> CreateEncoderFactoryReturning(int return_code) {
OCMStub([encoderMock releaseEncoder]).andReturn(return_code);
OCMStub([encoderMock setBitrate:0 framerate:0]).andReturn(return_code);
id encoderFactoryMock = OCMProtocolMock(@protocol(RTCVideoEncoderFactory));
RTCVideoCodecInfo *supported = [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil];
id encoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoderFactory)));
RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
OCMStub([encoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
OCMStub([encoderFactoryMock implementations]).andReturn(@[ supported ]);
OCMStub([encoderFactoryMock createEncoder:[OCMArg any]]).andReturn(encoderMock);
return encoderFactoryMock;
}
id<RTCVideoEncoderFactory> CreateOKEncoderFactory() {
id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateOKEncoderFactory() {
return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK);
}
id<RTCVideoEncoderFactory> CreateErrorEncoderFactory() {
id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateErrorEncoderFactory() {
return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
}
std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(id<RTCVideoEncoderFactory> factory) {
std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(
id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> factory) {
webrtc::ObjCVideoEncoderFactory encoder_factory(factory);
webrtc::SdpVideoFormat format("H264");
return encoder_factory.CreateVideoEncoder(format);
@ -83,7 +85,7 @@ TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsOKOnSuccess) {
CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]);
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer)
.set_rotation(webrtc::kVideoRotation_0)
@ -101,7 +103,7 @@ TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsErrorOnFail) {
CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]);
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer)
.set_rotation(webrtc::kVideoRotation_0)