Wrap WebRTC OBJC API types with RTC_OBJC_TYPE.

This CL introduces two new macros that affect the WebRTC OBJC API symbols:

- RTC_OBJC_TYPE_PREFIX:
  Macro used to prepend a prefix to the API types that are exported with
  RTC_OBJC_EXPORT.

  Clients can patch the definition of this macro locally and build
  WebRTC.framework with their own prefix if symbol clashing is a
  problem.

  This macro must only be defined by changing the value in
  sdk/objc/base/RTCMacros.h, and not via a compiler flag, to ensure
  it has a unique value.

- RTC_OBJC_TYPE:
  Macro used internally to reference API types. An API type declared
  without this macro is not included in the set of types affected by
  the configurable RTC_OBJC_TYPE_PREFIX (see the sketch below).
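
A minimal sketch of how such a prefixing scheme can be implemented with
two-step token pasting is shown below. Only RTC_OBJC_TYPE_PREFIX and
RTC_OBJC_TYPE come from this CL; the concatenation helpers and the MyApp
prefix are illustrative and not necessarily the exact definitions in
sdk/objc/base/RTCMacros.h:

  #import <Foundation/Foundation.h>

  // Empty by default. Clients that hit symbol clashes can patch this one
  // definition locally (e.g. to MyApp) before building WebRTC.framework.
  #define RTC_OBJC_TYPE_PREFIX

  // Two-step token pasting so that RTC_OBJC_TYPE_PREFIX is expanded before
  // the paste. The helper names are illustrative.
  #define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
  #define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)

  // Every API type referenced through this macro picks up the prefix.
  #define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

  // With the empty default prefix this declares RTCVideoCodecInfo, exactly
  // as before; with RTC_OBJC_TYPE_PREFIX patched to MyApp it declares
  // MyAppRTCVideoCodecInfo instead.
  @interface RTC_OBJC_TYPE (RTCVideoCodecInfo) : NSObject
  @end

A type declared or referenced without RTC_OBJC_TYPE keeps its literal name
and is not renamed when the prefix changes, which is why the diff below
routes every API type through the macro.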

Manual changes:
https://webrtc-review.googlesource.com/c/src/+/173781/5..10

The auto-generated changes in PS#5 were made with:
https://webrtc-review.googlesource.com/c/src/+/174061.

Bug: None
Change-Id: I0d54ca94db764fb3b6cb4365873f79e14cd879b8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173781
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31153}
Author: Mirko Bonadei
Date: 2020-05-04 16:14:32 +02:00
Committed by: Commit Bot
Parent: ce1320cc4d
Commit: a81e9c82fc

303 changed files with 2534 additions and 2189 deletions

@@ -15,9 +15,10 @@
NS_ASSUME_NONNULL_BEGIN
/* Interfaces for converting to/from internal C++ formats. */
@interface RTCCodecSpecificInfoH264 ()
@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264)
()
- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo;
@end

@@ -20,7 +20,7 @@ typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) {
};
RTC_OBJC_EXPORT
@interface RTCCodecSpecificInfoH264 : NSObject <RTCCodecSpecificInfo>
@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) : NSObject <RTC_OBJC_TYPE(RTCCodecSpecificInfo)>
@property(nonatomic, assign) RTCH264PacketizationMode packetizationMode;

@@ -13,7 +13,7 @@
#import "RTCH264ProfileLevelId.h"
// H264 specific settings.
@implementation RTCCodecSpecificInfoH264
@implementation RTC_OBJC_TYPE (RTCCodecSpecificInfoH264)
@synthesize packetizationMode = _packetizationMode;

@@ -16,10 +16,11 @@
NS_ASSUME_NONNULL_BEGIN
/** This decoder factory include support for all codecs bundled with WebRTC. If using custom
* codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory.
* codecs, create custom implementations of RTCVideoEncoderFactory and
* RTCVideoDecoderFactory.
*/
RTC_OBJC_EXPORT
@interface RTCDefaultVideoDecoderFactory : NSObject <RTCVideoDecoderFactory>
@interface RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
@end
NS_ASSUME_NONNULL_END

@@ -19,31 +19,33 @@
#import "api/video_codec/RTCVideoDecoderVP9.h"
#endif
@implementation RTCDefaultVideoDecoderFactory
@implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory)
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedHighInfo =
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedBaselineInfo =
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
#if defined(RTC_ENABLE_VP9)
RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp9Info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name];
#endif
return @[
@@ -56,14 +58,14 @@
];
}
- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
return [[RTCVideoDecoderH264 alloc] init];
return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
} else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
return [RTCVideoDecoderVP8 vp8Decoder];
return [RTC_OBJC_TYPE(RTCVideoDecoderVP8) vp8Decoder];
#if defined(RTC_ENABLE_VP9)
} else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
return [RTCVideoDecoderVP9 vp9Decoder];
return [RTC_OBJC_TYPE(RTCVideoDecoderVP9) vp9Decoder];
#endif
}

@@ -16,14 +16,15 @@
NS_ASSUME_NONNULL_BEGIN
/** This encoder factory include support for all codecs bundled with WebRTC. If using custom
* codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory.
* codecs, create custom implementations of RTCVideoEncoderFactory and
* RTCVideoDecoderFactory.
*/
RTC_OBJC_EXPORT
@interface RTCDefaultVideoEncoderFactory : NSObject <RTCVideoEncoderFactory>
@interface RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
@property(nonatomic, retain) RTCVideoCodecInfo *preferredCodec;
@property(nonatomic, retain) RTC_OBJC_TYPE(RTCVideoCodecInfo) *preferredCodec;
+ (NSArray<RTCVideoCodecInfo *> *)supportedCodecs;
+ (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs;
@end

@@ -19,33 +19,35 @@
#import "api/video_codec/RTCVideoEncoderVP9.h"
#endif
@implementation RTCDefaultVideoEncoderFactory
@implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory)
@synthesize preferredCodec;
+ (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
+ (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedHighInfo =
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedBaselineInfo =
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
#if defined(RTC_ENABLE_VP9)
RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp9Info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name];
#endif
return @[
@@ -58,24 +60,25 @@
];
}
- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
} else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
return [RTCVideoEncoderVP8 vp8Encoder];
return [RTC_OBJC_TYPE(RTCVideoEncoderVP8) vp8Encoder];
#if defined(RTC_ENABLE_VP9)
} else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
return [RTCVideoEncoderVP9 vp9Encoder];
return [RTC_OBJC_TYPE(RTCVideoEncoderVP9) vp9Encoder];
#endif
}
return nil;
}
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
NSMutableArray<RTCVideoCodecInfo *> *codecs = [[[self class] supportedCodecs] mutableCopy];
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
[[[self class] supportedCodecs] mutableCopy];
NSMutableArray<RTCVideoCodecInfo *> *orderedCodecs = [NSMutableArray array];
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *orderedCodecs = [NSMutableArray array];
NSUInteger index = [codecs indexOfObject:self.preferredCodec];
if (index != NSNotFound) {
[orderedCodecs addObject:[codecs objectAtIndex:index]];

@@ -48,7 +48,7 @@ typedef NS_ENUM(NSUInteger, RTCH264Level) {
};
RTC_OBJC_EXPORT
@interface RTCH264ProfileLevelId : NSObject
@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) : NSObject
@property(nonatomic, readonly) RTCH264Profile profile;
@property(nonatomic, readonly) RTCH264Level level;

@@ -75,15 +75,16 @@ NSString *MaxSupportedProfileLevelConstrainedHigh() {
} // namespace
@interface RTCH264ProfileLevelId ()
@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId)
()
@property(nonatomic, assign) RTCH264Profile profile;
@property(nonatomic, assign) RTCH264Level level;
@property(nonatomic, strong) NSString *hexString;
@end
@implementation RTCH264ProfileLevelId
@implementation RTC_OBJC_TYPE (RTCH264ProfileLevelId)
@synthesize profile = _profile;
@synthesize level = _level;

@@ -14,5 +14,5 @@
#import "RTCVideoDecoderFactory.h"
RTC_OBJC_EXPORT
@interface RTCVideoDecoderFactoryH264 : NSObject <RTCVideoDecoderFactory>
@interface RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
@end

@@ -13,10 +13,10 @@
#import "RTCH264ProfileLevelId.h"
#import "RTCVideoDecoderH264.h"
@implementation RTCVideoDecoderFactoryH264
@implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264)
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
NSMutableArray<RTCVideoCodecInfo *> *codecs = [NSMutableArray array];
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
NSString *codecName = kRTCVideoCodecH264Name;
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@@ -24,8 +24,9 @@
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedHighInfo =
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedHighParams];
[codecs addObject:constrainedHighInfo];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@@ -33,15 +34,16 @@
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedBaselineInfo =
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedBaselineParams];
[codecs addObject:constrainedBaselineInfo];
return [codecs copy];
}
- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
return [[RTCVideoDecoderH264 alloc] init];
- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
}
@end

@@ -14,5 +14,5 @@
#import "RTCVideoDecoder.h"
RTC_OBJC_EXPORT
@interface RTCVideoDecoderH264 : NSObject <RTCVideoDecoder>
@interface RTC_OBJC_TYPE (RTCVideoDecoderH264) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoder)>
@end

@@ -37,8 +37,8 @@ struct RTCFrameDecodeParams {
int64_t timestamp;
};
@interface RTCVideoDecoderH264 ()
- (void)setError:(OSStatus)error;
@interface RTC_OBJC_TYPE (RTCVideoDecoderH264)
() - (void)setError : (OSStatus)error;
@end
// This is the callback function that VideoToolbox calls when decode is
@@ -53,23 +53,25 @@ void decompressionOutputCallback(void *decoderRef,
std::unique_ptr<RTCFrameDecodeParams> decodeParams(
reinterpret_cast<RTCFrameDecodeParams *>(params));
if (status != noErr) {
RTCVideoDecoderH264 *decoder = (__bridge RTCVideoDecoderH264 *)decoderRef;
RTC_OBJC_TYPE(RTCVideoDecoderH264) *decoder =
(__bridge RTC_OBJC_TYPE(RTCVideoDecoderH264) *)decoderRef;
[decoder setError:status];
RTC_LOG(LS_ERROR) << "Failed to decode frame. Status: " << status;
return;
}
// TODO(tkchin): Handle CVO properly.
RTCCVPixelBuffer *frameBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:imageBuffer];
RTCVideoFrame *decodedFrame =
[[RTCVideoFrame alloc] initWithBuffer:frameBuffer
rotation:RTCVideoRotation_0
timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *frameBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:imageBuffer];
RTC_OBJC_TYPE(RTCVideoFrame) *decodedFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
initWithBuffer:frameBuffer
rotation:RTCVideoRotation_0
timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
decodedFrame.timeStamp = decodeParams->timestamp;
decodeParams->callback(decodedFrame);
}
// Decoder.
@implementation RTCVideoDecoderH264 {
@implementation RTC_OBJC_TYPE (RTCVideoDecoderH264) {
CMVideoFormatDescriptionRef _videoFormat;
CMMemoryPoolRef _memoryPool;
VTDecompressionSessionRef _decompressionSession;
@@ -96,9 +98,9 @@ void decompressionOutputCallback(void *decoderRef,
return WEBRTC_VIDEO_CODEC_OK;
}
- (NSInteger)decode:(RTCEncodedImage *)inputImage
- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)inputImage
missingFrames:(BOOL)missingFrames
codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
renderTimeMs:(int64_t)renderTimeMs {
RTC_DCHECK(inputImage.buffer);

@@ -14,5 +14,5 @@
#import "RTCVideoEncoderFactory.h"
RTC_OBJC_EXPORT
@interface RTCVideoEncoderFactoryH264 : NSObject <RTCVideoEncoderFactory>
@interface RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
@end

@@ -13,10 +13,10 @@
#import "RTCH264ProfileLevelId.h"
#import "RTCVideoEncoderH264.h"
@implementation RTCVideoEncoderFactoryH264
@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264)
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
NSMutableArray<RTCVideoCodecInfo *> *codecs = [NSMutableArray array];
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
NSString *codecName = kRTCVideoCodecH264Name;
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@@ -24,8 +24,9 @@
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedHighInfo =
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedHighParams];
[codecs addObject:constrainedHighInfo];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@@ -33,15 +34,16 @@
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedBaselineInfo =
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedBaselineParams];
[codecs addObject:constrainedBaselineInfo];
return [codecs copy];
}
- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
}
@end

@@ -15,8 +15,8 @@
#import "RTCVideoEncoder.h"
RTC_OBJC_EXPORT
@interface RTCVideoEncoderH264 : NSObject <RTCVideoEncoder>
@interface RTC_OBJC_TYPE (RTCVideoEncoderH264) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoder)>
- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo;
- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo;
@end

@@ -40,17 +40,14 @@
#include "sdk/objc/components/video_codec/nalu_rewriter.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
@interface RTCVideoEncoderH264 ()
@interface RTC_OBJC_TYPE (RTCVideoEncoderH264)
()
- (void)frameWasEncoded:(OSStatus)status
flags:(VTEncodeInfoFlags)infoFlags
sampleBuffer:(CMSampleBufferRef)sampleBuffer
codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
width:(int32_t)width
height:(int32_t)height
renderTimeMs:(int64_t)renderTimeMs
timestamp:(uint32_t)timestamp
rotation:(RTCVideoRotation)rotation;
- (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags sampleBuffer
: (CMSampleBufferRef)sampleBuffer codecSpecificInfo
: (id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo width : (int32_t)width height
: (int32_t)height renderTimeMs : (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation
: (RTCVideoRotation)rotation;
@end
@@ -70,8 +67,8 @@ const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
// Struct that we pass to the encoder per frame to encode. We receive it again
// in the encoder callback.
struct RTCFrameEncodeParams {
RTCFrameEncodeParams(RTCVideoEncoderH264 *e,
RTCCodecSpecificInfoH264 *csi,
RTCFrameEncodeParams(RTC_OBJC_TYPE(RTCVideoEncoderH264) * e,
RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * csi,
int32_t w,
int32_t h,
int64_t rtms,
@@ -81,12 +78,12 @@ struct RTCFrameEncodeParams {
if (csi) {
codecSpecificInfo = csi;
} else {
codecSpecificInfo = [[RTCCodecSpecificInfoH264 alloc] init];
codecSpecificInfo = [[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) alloc] init];
}
}
RTCVideoEncoderH264 *encoder;
RTCCodecSpecificInfoH264 *codecSpecificInfo;
RTC_OBJC_TYPE(RTCVideoEncoderH264) * encoder;
RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * codecSpecificInfo;
int32_t width;
int32_t height;
int64_t render_time_ms;
@@ -97,7 +94,8 @@ struct RTCFrameEncodeParams {
// We receive I420Frames as input, but we need to feed CVPixelBuffers into the
// encoder. This performs the copy and format conversion.
// TODO(tkchin): See if encoder will accept i420 frames and compare performance.
bool CopyVideoFrameToNV12PixelBuffer(id<RTCI420Buffer> frameBuffer, CVPixelBufferRef pixelBuffer) {
bool CopyVideoFrameToNV12PixelBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frameBuffer,
CVPixelBufferRef pixelBuffer) {
RTC_DCHECK(pixelBuffer);
RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer), kNV12PixelFormat);
RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height);
@@ -313,8 +311,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
}
} // namespace
@implementation RTCVideoEncoderH264 {
RTCVideoCodecInfo *_codecInfo;
@implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) {
RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo;
std::unique_ptr<webrtc::BitrateAdjuster> _bitrateAdjuster;
uint32_t _targetBitrateBps;
uint32_t _encoderBitrateBps;
@@ -340,7 +338,7 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
// drastically reduced bitrate, so we want to avoid that. In steady state
// conditions, 0.95 seems to give us better overall bitrate over long periods
// of time.
- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo {
- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo {
if (self = [super init]) {
_codecInfo = codecInfo;
_bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95));
@@ -358,7 +356,7 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
[self destroyCompressionSession];
}
- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
numberOfCores:(int)numberOfCores {
RTC_DCHECK(settings);
RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]);
@@ -388,8 +386,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
return [self resetCompressionSessionWithPixelFormat:kNV12PixelFormat];
}
- (NSInteger)encode:(RTCVideoFrame *)frame
codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)codecSpecificInfo
- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
frameTypes:(NSArray<NSNumber *> *)frameTypes {
RTC_DCHECK_EQ(frame.width, _width);
RTC_DCHECK_EQ(frame.height, _height);
@@ -404,9 +402,10 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
}
CVPixelBufferRef pixelBuffer = nullptr;
if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
// Native frame buffer
RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
if (![rtcPixelBuffer requiresCropping]) {
// This pixel buffer might have a higher resolution than what the
// compression session is configured to. The compression session can
@@ -543,17 +542,18 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
return WEBRTC_VIDEO_CODEC_OK;
}
- (OSType)pixelFormatOfFrame:(RTCVideoFrame *)frame {
- (OSType)pixelFormatOfFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
// Use NV12 for non-native frames.
if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
return CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer);
}
return kNV12PixelFormat;
}
- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTCVideoFrame *)frame {
- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
BOOL resetCompressionSession = NO;
// If we're capturing native frames in another pixel format than the compression session is
@@ -755,7 +755,7 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
- (void)frameWasEncoded:(OSStatus)status
flags:(VTEncodeInfoFlags)infoFlags
sampleBuffer:(CMSampleBufferRef)sampleBuffer
codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
codecSpecificInfo:(id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
width:(int32_t)width
height:(int32_t)height
renderTimeMs:(int64_t)renderTimeMs
@@ -783,18 +783,19 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
}
__block std::unique_ptr<rtc::Buffer> buffer = std::make_unique<rtc::Buffer>();
RTCRtpFragmentationHeader *header;
RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * header;
{
std::unique_ptr<webrtc::RTPFragmentationHeader> header_cpp;
bool result =
H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get(), &header_cpp);
header = [[RTCRtpFragmentationHeader alloc] initWithNativeFragmentationHeader:header_cpp.get()];
header = [[RTC_OBJC_TYPE(RTCRtpFragmentationHeader) alloc]
initWithNativeFragmentationHeader:header_cpp.get()];
if (!result) {
return;
}
}
RTCEncodedImage *frame = [[RTCEncodedImage alloc] init];
RTC_OBJC_TYPE(RTCEncodedImage) *frame = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
// This assumes ownership of `buffer` and is responsible for freeing it when done.
frame.buffer = [[NSData alloc] initWithBytesNoCopy:buffer->data()
length:buffer->size()
@@ -825,9 +826,10 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id
_bitrateAdjuster->Update(frame.buffer.length);
}
- (nullable RTCVideoEncoderQpThresholds *)scalingSettings {
return [[RTCVideoEncoderQpThresholds alloc] initWithThresholdsLow:kLowH264QpThreshold
high:kHighH264QpThreshold];
- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings {
return [[RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) alloc]
initWithThresholdsLow:kLowH264QpThreshold
high:kHighH264QpThreshold];
}
@end