Wrap WebRTC OBJC API types with RTC_OBJC_TYPE.

This CL introduces two new macros that affect the WebRTC OBJC API symbols:

- RTC_OBJC_TYPE_PREFIX:
  Macro used to prepend a prefix to the API types that are exported with
  RTC_OBJC_EXPORT.

  Clients can patch the definition of this macro locally and build
  WebRTC.framework with their own prefix in case symbol clashing is a
  problem.

  This macro must only be defined by changing its value in
  sdk/objc/base/RTCMacros.h, and not via a compiler flag, to ensure
  it has a unique value (see the sketch after this list).

- RTC_OBJC_TYPE:
  Macro used internally to reference API types. An API type declared
  without this macro is excluded from the set of types affected by the
  configurable RTC_OBJC_TYPE_PREFIX.
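
For illustration, here is a minimal sketch of how the two macros
plausibly fit together in sdk/objc/base/RTCMacros.h and at a call site.
The concatenation helper names below are assumptions, not the verbatim
file contents:

  // Configurable prefix; empty by default, patched locally by clients
  // that need uniquely named symbols. Deliberately not settable via a
  // compiler flag, so every translation unit sees the same value.
  #define RTC_OBJC_TYPE_PREFIX

  // Two-step paste so RTC_OBJC_TYPE_PREFIX is expanded before token
  // concatenation (assumed helper names).
  #define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
  #define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)
  #define RTC_OBJC_TYPE(type_name) \
    RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

  // Usage: with an empty prefix this declares RTCVideoFrame; with
  // RTC_OBJC_TYPE_PREFIX defined as e.g. MyApp it declares
  // MyAppRTCVideoFrame instead.
  RTC_OBJC_EXPORT
  @interface RTC_OBJC_TYPE(RTCVideoFrame) : NSObject
  @end

With an empty prefix the wrapped names expand unchanged, which is why
the mechanical diff below does not change behavior.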

Manual changes:
https://webrtc-review.googlesource.com/c/src/+/173781/5..10

The auto-generated changes in PS#5 were made with:
https://webrtc-review.googlesource.com/c/src/+/174061.

Bug: None
Change-Id: I0d54ca94db764fb3b6cb4365873f79e14cd879b8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173781
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31153}


@@ -152,7 +152,8 @@ AudioDeviceGeneric::InitStatus AudioDeviceIOS::Init() {
   // here. They have not been set and confirmed yet since configureForWebRTC
   // is not called until audio is about to start. However, it makes sense to
   // store the parameters now and then verify at a later stage.
-  RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration];
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* config =
+      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
   playout_parameters_.reset(config.sampleRate, config.outputNumberOfChannels);
   record_parameters_.reset(config.sampleRate, config.inputNumberOfChannels);
   // Ensure that the audio device buffer (ADB) knows about the internal audio
@@ -532,12 +533,12 @@ void AudioDeviceIOS::HandleInterruptionEnd() {
     // Allocate new buffers given the potentially new stream format.
     SetupAudioBuffersForActiveAudioSession();
   }
-  UpdateAudioUnit([RTCAudioSession sharedInstance].canPlayOrRecord);
+  UpdateAudioUnit([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].canPlayOrRecord);
 }
 
 void AudioDeviceIOS::HandleValidRouteChange() {
   RTC_DCHECK_RUN_ON(&thread_checker_);
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   RTCLog(@"%@", session);
   HandleSampleRateChange(session.sampleRate);
 }
@@ -565,7 +566,7 @@ void AudioDeviceIOS::HandleSampleRateChange(float sample_rate) {
   // The audio unit is already initialized or started.
   // Check to see if the sample rate or buffer size has changed.
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   const double session_sample_rate = session.sampleRate;
   const NSTimeInterval session_buffer_duration = session.IOBufferDuration;
   const size_t session_frames_per_buffer =
@@ -646,7 +647,7 @@ void AudioDeviceIOS::HandlePlayoutGlitchDetected() {
   int64_t glitch_count = num_detected_playout_glitches_;
   dispatch_async(dispatch_get_main_queue(), ^{
-    RTCAudioSession* session = [RTCAudioSession sharedInstance];
+    RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
     [session notifyDidDetectPlayoutGlitch:glitch_count];
   });
 }
@@ -678,7 +679,7 @@ void AudioDeviceIOS::UpdateAudioDeviceBuffer() {
 void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
   LOGI() << "SetupAudioBuffersForActiveAudioSession";
   // Verify the current values once the audio session has been activated.
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   double sample_rate = session.sampleRate;
   NSTimeInterval io_buffer_duration = session.IOBufferDuration;
   RTCLog(@"%@", session);
@@ -687,7 +688,8 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
   // hardware sample rate but continue and use the non-ideal sample rate after
   // reinitializing the audio parameters. Most BT headsets only support 8kHz or
   // 16kHz.
-  RTCAudioSessionConfiguration* webRTCConfig = [RTCAudioSessionConfiguration webRTCConfiguration];
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* webRTCConfig =
+      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
   if (sample_rate != webRTCConfig.sampleRate) {
     RTC_LOG(LS_WARNING) << "Unable to set the preferred sample rate";
   }
@@ -797,7 +799,7 @@ void AudioDeviceIOS::UpdateAudioUnit(bool can_play_or_record) {
   if (should_start_audio_unit) {
     RTCLog(@"Starting audio unit for UpdateAudioUnit");
     // Log session settings before trying to start audio streaming.
-    RTCAudioSession* session = [RTCAudioSession sharedInstance];
+    RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
     RTCLog(@"%@", session);
     if (!audio_unit_->Start()) {
       RTCLogError(@"Failed to start audio unit.");
@@ -827,7 +829,7 @@ bool AudioDeviceIOS::ConfigureAudioSession() {
     RTCLogWarning(@"Audio session already configured.");
     return false;
   }
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session lockForConfiguration];
   bool success = [session configureWebRTCSession:nil];
   [session unlockForConfiguration];
@@ -847,7 +849,7 @@ void AudioDeviceIOS::UnconfigureAudioSession() {
     RTCLogWarning(@"Audio session already unconfigured.");
     return;
   }
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session lockForConfiguration];
   [session unconfigureWebRTCSession:nil];
   [session endWebRTCSession:nil];
@@ -865,7 +867,7 @@ bool AudioDeviceIOS::InitPlayOrRecord() {
     return false;
   }
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   // Subscribe to audio session events.
   [session pushDelegate:audio_session_observer_];
   is_interrupted_ = session.isInterrupted ? true : false;
@@ -915,7 +917,7 @@ void AudioDeviceIOS::ShutdownPlayOrRecord() {
   io_thread_checker_.Detach();
   // Remove audio session notification observers.
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session removeDelegate:audio_session_observer_];
   // All I/O should be stopped or paused prior to deactivating the audio


@@ -13,15 +13,18 @@
 #import <CoreVideo/CoreVideo.h>
 
+#import "base/RTCMacros.h"
+
 #include "common_video/include/video_frame_buffer.h"
 
-@protocol RTCVideoFrameBuffer;
+@protocol RTC_OBJC_TYPE
+(RTCVideoFrameBuffer);
 
 namespace webrtc {
 
 class ObjCFrameBuffer : public VideoFrameBuffer {
  public:
-  explicit ObjCFrameBuffer(id<RTCVideoFrameBuffer>);
+  explicit ObjCFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>);
   ~ObjCFrameBuffer() override;
 
   Type type() const override;
@@ -31,15 +34,15 @@ class ObjCFrameBuffer : public VideoFrameBuffer {
   rtc::scoped_refptr<I420BufferInterface> ToI420() override;
 
-  id<RTCVideoFrameBuffer> wrapped_frame_buffer() const;
+  id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> wrapped_frame_buffer() const;
 
  private:
-  id<RTCVideoFrameBuffer> frame_buffer_;
+  id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer_;
   int width_;
   int height_;
 };
 
-id<RTCVideoFrameBuffer> ToObjCVideoFrameBuffer(
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ToObjCVideoFrameBuffer(
     const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
 
 }  // namespace webrtc


@@ -17,10 +17,10 @@ namespace webrtc {
 namespace {
 
-/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTCI420Buffer */
+/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTC_OBJC_TYPE(RTCI420Buffer) */
 class ObjCI420FrameBuffer : public I420BufferInterface {
  public:
-  explicit ObjCI420FrameBuffer(id<RTCI420Buffer> frame_buffer)
+  explicit ObjCI420FrameBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frame_buffer)
       : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
   ~ObjCI420FrameBuffer() override {}
@@ -41,14 +41,14 @@ class ObjCI420FrameBuffer : public I420BufferInterface {
   int StrideV() const override { return frame_buffer_.strideV; }
 
  private:
-  id<RTCI420Buffer> frame_buffer_;
+  id<RTC_OBJC_TYPE(RTCI420Buffer)> frame_buffer_;
   int width_;
   int height_;
 };
 
 }  // namespace
 
-ObjCFrameBuffer::ObjCFrameBuffer(id<RTCVideoFrameBuffer> frame_buffer)
+ObjCFrameBuffer::ObjCFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer)
     : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
 
 ObjCFrameBuffer::~ObjCFrameBuffer() {}
@@ -72,15 +72,16 @@ rtc::scoped_refptr<I420BufferInterface> ObjCFrameBuffer::ToI420() {
   return buffer;
 }
 
-id<RTCVideoFrameBuffer> ObjCFrameBuffer::wrapped_frame_buffer() const {
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ObjCFrameBuffer::wrapped_frame_buffer() const {
   return frame_buffer_;
 }
 
-id<RTCVideoFrameBuffer> ToObjCVideoFrameBuffer(const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ToObjCVideoFrameBuffer(
+    const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
   if (buffer->type() == VideoFrameBuffer::Type::kNative) {
     return static_cast<ObjCFrameBuffer*>(buffer.get())->wrapped_frame_buffer();
   } else {
-    return [[RTCI420Buffer alloc] initWithFrameBuffer:buffer->ToI420()];
+    return [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:buffer->ToI420()];
   }
 }


@@ -11,26 +11,29 @@
 #ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_
 #define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_
 
+#import "base/RTCMacros.h"
+
 #include "api/video_codecs/video_decoder_factory.h"
 #include "media/base/codec.h"
 
-@protocol RTCVideoDecoderFactory;
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoderFactory);
 
 namespace webrtc {
 
 class ObjCVideoDecoderFactory : public VideoDecoderFactory {
  public:
-  explicit ObjCVideoDecoderFactory(id<RTCVideoDecoderFactory>);
+  explicit ObjCVideoDecoderFactory(id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>);
   ~ObjCVideoDecoderFactory() override;
 
-  id<RTCVideoDecoderFactory> wrapped_decoder_factory() const;
+  id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> wrapped_decoder_factory() const;
 
   std::vector<SdpVideoFormat> GetSupportedFormats() const override;
   std::unique_ptr<VideoDecoder> CreateVideoDecoder(
       const SdpVideoFormat& format) override;
 
  private:
-  id<RTCVideoDecoderFactory> decoder_factory_;
+  id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> decoder_factory_;
 };
 
 }  // namespace webrtc


@@ -33,7 +33,7 @@ namespace webrtc {
 namespace {
 class ObjCVideoDecoder : public VideoDecoder {
  public:
-  ObjCVideoDecoder(id<RTCVideoDecoder> decoder)
+  ObjCVideoDecoder(id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder)
       : decoder_(decoder), implementation_name_([decoder implementationName].stdString) {}
 
   int32_t InitDecode(const VideoCodec *codec_settings, int32_t number_of_cores) override {
@@ -43,8 +43,8 @@ class ObjCVideoDecoder : public VideoDecoder {
   int32_t Decode(const EncodedImage &input_image,
                  bool missing_frames,
                  int64_t render_time_ms = -1) override {
-    RTCEncodedImage *encodedImage =
-        [[RTCEncodedImage alloc] initWithNativeEncodedImage:input_image];
+    RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
+        [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:input_image];
 
     return [decoder_ decode:encodedImage
               missingFrames:missing_frames
@@ -53,7 +53,7 @@ class ObjCVideoDecoder : public VideoDecoder {
   }
 
   int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override {
-    [decoder_ setCallback:^(RTCVideoFrame *frame) {
+    [decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
       const rtc::scoped_refptr<VideoFrameBuffer> buffer =
          new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
       VideoFrame videoFrame =
@@ -76,26 +76,27 @@ class ObjCVideoDecoder : public VideoDecoder {
   const char *ImplementationName() const override { return implementation_name_.c_str(); }
 
  private:
-  id<RTCVideoDecoder> decoder_;
+  id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder_;
   const std::string implementation_name_;
 };
 }  // namespace
 
-ObjCVideoDecoderFactory::ObjCVideoDecoderFactory(id<RTCVideoDecoderFactory> decoder_factory)
+ObjCVideoDecoderFactory::ObjCVideoDecoderFactory(
+    id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> decoder_factory)
     : decoder_factory_(decoder_factory) {}
 
 ObjCVideoDecoderFactory::~ObjCVideoDecoderFactory() {}
 
-id<RTCVideoDecoderFactory> ObjCVideoDecoderFactory::wrapped_decoder_factory() const {
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> ObjCVideoDecoderFactory::wrapped_decoder_factory() const {
   return decoder_factory_;
 }
 
 std::unique_ptr<VideoDecoder> ObjCVideoDecoderFactory::CreateVideoDecoder(
     const SdpVideoFormat &format) {
   NSString *codecName = [NSString stringWithUTF8String:format.name.c_str()];
-  for (RTCVideoCodecInfo *codecInfo in decoder_factory_.supportedCodecs) {
+  for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * codecInfo in decoder_factory_.supportedCodecs) {
     if ([codecName isEqualToString:codecInfo.name]) {
-      id<RTCVideoDecoder> decoder = [decoder_factory_ createDecoder:codecInfo];
+      id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder = [decoder_factory_ createDecoder:codecInfo];
 
       if ([decoder isKindOfClass:[RTCWrappedNativeVideoDecoder class]]) {
         return [(RTCWrappedNativeVideoDecoder *)decoder releaseWrappedDecoder];
@@ -110,7 +111,7 @@ std::unique_ptr<VideoDecoder> ObjCVideoDecoderFactory::CreateVideoDecoder(
 std::vector<SdpVideoFormat> ObjCVideoDecoderFactory::GetSupportedFormats() const {
   std::vector<SdpVideoFormat> supported_formats;
-  for (RTCVideoCodecInfo *supportedCodec in decoder_factory_.supportedCodecs) {
+  for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in decoder_factory_.supportedCodecs) {
     SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
     supported_formats.push_back(format);
   }


@@ -13,18 +13,21 @@
 #import <Foundation/Foundation.h>
 
+#import "base/RTCMacros.h"
+
 #include "api/video_codecs/video_encoder_factory.h"
 
-@protocol RTCVideoEncoderFactory;
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderFactory);
 
 namespace webrtc {
 
 class ObjCVideoEncoderFactory : public VideoEncoderFactory {
  public:
-  explicit ObjCVideoEncoderFactory(id<RTCVideoEncoderFactory>);
+  explicit ObjCVideoEncoderFactory(id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>);
   ~ObjCVideoEncoderFactory() override;
 
-  id<RTCVideoEncoderFactory> wrapped_encoder_factory() const;
+  id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> wrapped_encoder_factory() const;
 
   std::vector<SdpVideoFormat> GetSupportedFormats() const override;
   std::vector<SdpVideoFormat> GetImplementations() const override;
@@ -34,7 +37,7 @@ class ObjCVideoEncoderFactory : public VideoEncoderFactory {
   std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override;
 
  private:
-  id<RTCVideoEncoderFactory> encoder_factory_;
+  id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> encoder_factory_;
 };
 
 }  // namespace webrtc


@@ -12,6 +12,7 @@
 #include <string>
 
+#import "base/RTCMacros.h"
 #import "base/RTCVideoEncoder.h"
 #import "base/RTCVideoEncoderFactory.h"
 #import "components/video_codec/RTCCodecSpecificInfoH264+Private.h"
@@ -38,26 +39,27 @@ namespace {
 class ObjCVideoEncoder : public VideoEncoder {
  public:
-  ObjCVideoEncoder(id<RTCVideoEncoder> encoder)
+  ObjCVideoEncoder(id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder)
       : encoder_(encoder), implementation_name_([encoder implementationName].stdString) {}
 
   int32_t InitEncode(const VideoCodec *codec_settings, const Settings &encoder_settings) override {
-    RTCVideoEncoderSettings *settings =
-        [[RTCVideoEncoderSettings alloc] initWithNativeVideoCodec:codec_settings];
+    RTC_OBJC_TYPE(RTCVideoEncoderSettings) *settings =
+        [[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] initWithNativeVideoCodec:codec_settings];
     return [encoder_ startEncodeWithSettings:settings
                                numberOfCores:encoder_settings.number_of_cores];
   }
 
   int32_t RegisterEncodeCompleteCallback(EncodedImageCallback *callback) override {
-    [encoder_ setCallback:^BOOL(RTCEncodedImage *_Nonnull frame,
-                                id<RTCCodecSpecificInfo> _Nonnull info,
-                                RTCRtpFragmentationHeader *_Nonnull header) {
+    [encoder_ setCallback:^BOOL(RTC_OBJC_TYPE(RTCEncodedImage) * _Nonnull frame,
+                                id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> _Nonnull info,
+                                RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * _Nonnull header) {
       EncodedImage encodedImage = [frame nativeEncodedImage];
 
       // Handle types that can be converted into one of CodecSpecificInfo's hard coded cases.
       CodecSpecificInfo codecSpecificInfo;
-      if ([info isKindOfClass:[RTCCodecSpecificInfoH264 class]]) {
-        codecSpecificInfo = [(RTCCodecSpecificInfoH264 *)info nativeCodecSpecificInfo];
+      if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) class]]) {
+        codecSpecificInfo =
+            [(RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo];
       }
 
       std::unique_ptr<RTPFragmentationHeader> fragmentationHeader =
@@ -95,7 +97,7 @@ class ObjCVideoEncoder : public VideoEncoder {
     info.supports_native_handle = true;
     info.implementation_name = implementation_name_;
 
-    RTCVideoEncoderQpThresholds *qp_thresholds = [encoder_ scalingSettings];
+    RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *qp_thresholds = [encoder_ scalingSettings];
     info.scaling_settings = qp_thresholds ? ScalingSettings(qp_thresholds.low, qp_thresholds.high) :
                                             ScalingSettings::kOff;
@@ -105,26 +107,29 @@ class ObjCVideoEncoder : public VideoEncoder {
   }
 
  private:
-  id<RTCVideoEncoder> encoder_;
+  id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder_;
   const std::string implementation_name_;
 };
 
 class ObjcVideoEncoderSelector : public VideoEncoderFactory::EncoderSelectorInterface {
  public:
-  ObjcVideoEncoderSelector(id<RTCVideoEncoderSelector> selector) { selector_ = selector; }
+  ObjcVideoEncoderSelector(id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector) {
+    selector_ = selector;
+  }
 
   void OnCurrentEncoder(const SdpVideoFormat &format) override {
-    RTCVideoCodecInfo *info = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat:format];
+    RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
+        [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
     [selector_ registerCurrentEncoderInfo:info];
   }
 
   absl::optional<SdpVideoFormat> OnEncoderBroken() override {
-    RTCVideoCodecInfo *info = [selector_ encoderForBrokenEncoder];
+    RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBrokenEncoder];
     if (info) {
       return [info nativeSdpVideoFormat];
    }
     return absl::nullopt;
   }
 
   absl::optional<SdpVideoFormat> OnAvailableBitrate(const DataRate &rate) override {
-    RTCVideoCodecInfo *info = [selector_ encoderForBitrate:rate.kbps<NSInteger>()];
+    RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBitrate:rate.kbps<NSInteger>()];
     if (info) {
       return [info nativeSdpVideoFormat];
     }
@@ -132,23 +137,24 @@ class ObjcVideoEncoderSelector : public VideoEncoderFactory::EncoderSelectorInte
   }
 
  private:
-  id<RTCVideoEncoderSelector> selector_;
+  id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector_;
 };
 }  // namespace
 
-ObjCVideoEncoderFactory::ObjCVideoEncoderFactory(id<RTCVideoEncoderFactory> encoder_factory)
+ObjCVideoEncoderFactory::ObjCVideoEncoderFactory(
+    id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> encoder_factory)
     : encoder_factory_(encoder_factory) {}
 
 ObjCVideoEncoderFactory::~ObjCVideoEncoderFactory() {}
 
-id<RTCVideoEncoderFactory> ObjCVideoEncoderFactory::wrapped_encoder_factory() const {
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> ObjCVideoEncoderFactory::wrapped_encoder_factory() const {
   return encoder_factory_;
 }
 
 std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetSupportedFormats() const {
   std::vector<SdpVideoFormat> supported_formats;
-  for (RTCVideoCodecInfo *supportedCodec in [encoder_factory_ supportedCodecs]) {
+  for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ supportedCodecs]) {
    SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
    supported_formats.push_back(format);
  }
@@ -159,7 +165,7 @@ std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetSupportedFormats() const
 std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetImplementations() const {
   if ([encoder_factory_ respondsToSelector:@selector(implementations)]) {
     std::vector<SdpVideoFormat> supported_formats;
-    for (RTCVideoCodecInfo *supportedCodec in [encoder_factory_ implementations]) {
+    for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ implementations]) {
       SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
       supported_formats.push_back(format);
     }
@@ -183,8 +189,9 @@ VideoEncoderFactory::CodecInfo ObjCVideoEncoderFactory::QueryVideoEncoder(
 std::unique_ptr<VideoEncoder> ObjCVideoEncoderFactory::CreateVideoEncoder(
     const SdpVideoFormat &format) {
-  RTCVideoCodecInfo *info = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat:format];
-  id<RTCVideoEncoder> encoder = [encoder_factory_ createEncoder:info];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
+  id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder = [encoder_factory_ createEncoder:info];
   if ([encoder isKindOfClass:[RTCWrappedNativeVideoEncoder class]]) {
     return [(RTCWrappedNativeVideoEncoder *)encoder releaseWrappedEncoder];
   } else {


@@ -17,7 +17,7 @@
 namespace webrtc {
 
-RTCVideoFrame* ToObjCVideoFrame(const VideoFrame& frame);
+RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame& frame);
 
 }  // namespace webrtc


@@ -15,11 +15,11 @@
 namespace webrtc {
 
-RTCVideoFrame *ToObjCVideoFrame(const VideoFrame &frame) {
-  RTCVideoFrame *videoFrame =
-      [[RTCVideoFrame alloc] initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer())
-                                   rotation:RTCVideoRotation(frame.rotation())
-                                timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
+RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame &frame) {
+  RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
+      initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer())
+            rotation:RTCVideoRotation(frame.rotation())
+         timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
   videoFrame.timeStamp = frame.timestamp();
 
   return videoFrame;


@@ -14,20 +14,23 @@
 #import <CoreGraphics/CoreGraphics.h>
 #import <Foundation/Foundation.h>
 
+#import "base/RTCMacros.h"
+
 #include "api/video/video_frame.h"
 #include "api/video/video_sink_interface.h"
 
-@protocol RTCVideoRenderer;
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer);
 
 namespace webrtc {
 
 class ObjCVideoRenderer : public rtc::VideoSinkInterface<VideoFrame> {
  public:
-  ObjCVideoRenderer(id<RTCVideoRenderer> renderer);
+  ObjCVideoRenderer(id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer);
   void OnFrame(const VideoFrame& nativeVideoFrame) override;
 
  private:
-  id<RTCVideoRenderer> renderer_;
+  id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer_;
   CGSize size_;
 };


@@ -10,6 +10,7 @@
 #include "sdk/objc/native/src/objc_video_renderer.h"
 
+#import "base/RTCMacros.h"
 #import "base/RTCVideoFrame.h"
 #import "base/RTCVideoRenderer.h"
@@ -17,11 +18,11 @@
 namespace webrtc {
 
-ObjCVideoRenderer::ObjCVideoRenderer(id<RTCVideoRenderer> renderer)
+ObjCVideoRenderer::ObjCVideoRenderer(id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer)
     : renderer_(renderer), size_(CGSizeZero) {}
 
 void ObjCVideoRenderer::OnFrame(const VideoFrame& nativeVideoFrame) {
-  RTCVideoFrame* videoFrame = ToObjCVideoFrame(nativeVideoFrame);
+  RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = ToObjCVideoFrame(nativeVideoFrame);
 
   CGSize current_size = (videoFrame.rotation % 180 == 0) ?
       CGSizeMake(videoFrame.width, videoFrame.height) :


@@ -17,9 +17,9 @@
 #include "media/base/adapted_video_track_source.h"
 #include "rtc_base/timestamp_aligner.h"
 
-RTC_FWD_DECL_OBJC_CLASS(RTCVideoFrame);
+RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame));
 
-@interface RTCObjCVideoSourceAdapter : NSObject<RTCVideoCapturerDelegate>
+@interface RTCObjCVideoSourceAdapter : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
 @end
 
 namespace webrtc {
@@ -42,7 +42,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource {
   bool remote() const override;
 
-  void OnCapturedFrame(RTCVideoFrame* frame);
+  void OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame);
 
   // Called by RTCVideoSource.
   void OnOutputFormatRequest(int width, int height, int fps);


@@ -25,7 +25,8 @@
 @synthesize objCVideoTrackSource = _objCVideoTrackSource;
 
-- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+    didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   _objCVideoTrackSource->OnCapturedFrame(frame);
 }
@@ -61,7 +62,7 @@ void ObjCVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps)
   video_adapter()->OnOutputFormatRequest(format);
 }
 
-void ObjCVideoTrackSource::OnCapturedFrame(RTCVideoFrame *frame) {
+void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
   const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
   const int64_t translated_timestamp_us =
       timestamp_aligner_.TranslateTimestamp(timestamp_us, rtc::TimeMicros());
@@ -88,10 +89,11 @@ void ObjCVideoTrackSource::OnCapturedFrame(RTCVideoFrame *frame) {
   if (adapted_width == frame.width && adapted_height == frame.height) {
     // No adaption - optimized path.
     buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
-  } else if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+  } else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
     // Adapted CVPixelBuffer frame.
-    RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
-    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTCCVPixelBuffer alloc]
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+        (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
         initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
                adaptedWidth:adapted_width
              adaptedHeight:adapted_height