Prepare for deleting implicit conversion from raw pointer to scoped_refptr.

Updates all WebRTC code so that a small follow-up CL only needs to add the
"explicit" keyword. Patchset #24 passed all WebRTC tests with explicit enabled.

Bug: webrtc:13464
Change-Id: I39863d3752f73209b531120f66916dc9177bf63a
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/242363
Reviewed-by: Tomas Gunnarsson <tommi@webrtc.org>
Commit-Queue: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35718}
This commit is contained in:
Niels Möller
2022-01-17 15:26:54 +01:00
committed by WebRTC LUCI CQ
parent 9609a825eb
commit ac0d18341d
33 changed files with 85 additions and 76 deletions

View File

@ -48,11 +48,10 @@
- (RTC_OBJC_TYPE(RTCAudioSource) *)source {
if (!_source) {
rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
self.nativeAudioTrack->GetSource();
rtc::scoped_refptr<webrtc::AudioSourceInterface> source(self.nativeAudioTrack->GetSource());
if (source) {
_source = [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self.factory
nativeAudioSource:source.get()];
nativeAudioSource:source];
}
}
return _source;
@ -61,7 +60,8 @@
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::AudioTrackInterface>)nativeAudioTrack {
return static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get());
return rtc::scoped_refptr<webrtc::AudioTrackInterface>(
static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get()));
}
@end

View File

@ -20,7 +20,7 @@ namespace {
class ObjCEncodedImageBuffer : public webrtc::EncodedImageBufferInterface {
public:
static rtc::scoped_refptr<ObjCEncodedImageBuffer> Create(NSData *data) {
return new rtc::RefCountedObject<ObjCEncodedImageBuffer>(data);
return rtc::make_ref_counted<ObjCEncodedImageBuffer>(data);
}
const uint8_t *data() const override { return static_cast<const uint8_t *>(data_.bytes); }
// TODO(bugs.webrtc.org/9378): delete this non-const data method.

View File

@ -69,21 +69,21 @@ class StatsObserverAdapter : public StatsObserver {
- (void)statisticsForSender : (RTC_OBJC_TYPE(RTCRtpSender) *)sender completionHandler
: (RTCStatisticsCompletionHandler)completionHandler {
rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
self.nativePeerConnection->GetStats(sender.nativeRtpSender, collector);
}
- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver
completionHandler:(RTCStatisticsCompletionHandler)completionHandler {
rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
self.nativePeerConnection->GetStats(receiver.nativeRtpReceiver, collector);
}
- (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler {
rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
self.nativePeerConnection->GetStats(collector);
}
@ -91,9 +91,8 @@ class StatsObserverAdapter : public StatsObserver {
statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
completionHandler:
(void (^)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats))completionHandler {
rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer(
new rtc::RefCountedObject<webrtc::StatsObserverAdapter>
(completionHandler));
rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer =
rtc::make_ref_counted<webrtc::StatsObserverAdapter>(completionHandler);
webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
[[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
self.nativePeerConnection->GetStats(

View File

@ -572,9 +572,8 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack(
- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
RTC_DCHECK(completionHandler != nil);
rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
observer(new rtc::RefCountedObject
<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter> observer =
rtc::make_ref_counted<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler);
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
@ -584,9 +583,8 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack(
- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
RTC_DCHECK(completionHandler != nil);
rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
observer(new rtc::RefCountedObject
<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter> observer =
rtc::make_ref_counted<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler);
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
@ -596,24 +594,24 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack(
- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
RTC_DCHECK(completionHandler != nil);
rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer(
new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer =
rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
_peerConnection->SetLocalDescription(sdp.nativeDescription, observer);
}
- (void)setLocalDescriptionWithCompletionHandler:
(RTCSetSessionDescriptionCompletionHandler)completionHandler {
RTC_DCHECK(completionHandler != nil);
rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer(
new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer =
rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
_peerConnection->SetLocalDescription(observer);
}
- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
RTC_DCHECK(completionHandler != nil);
rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface> observer(
new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface> observer =
rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
_peerConnection->SetRemoteDescription(sdp.nativeDescription, observer);
}

View File

@ -61,8 +61,8 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(
signalingThread:(rtc::Thread *)signalingThread
workerThread:(rtc::Thread *)workerThread
isScreenCast:(BOOL)isScreenCast {
rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(
new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>(isScreenCast));
rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource =
rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(isScreenCast);
return [self initWithFactory:factory
nativeVideoSource:webrtc::VideoTrackSourceProxy::Create(

View File

@ -59,11 +59,11 @@
- (RTC_OBJC_TYPE(RTCVideoSource) *)source {
if (!_source) {
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
self.nativeVideoTrack->GetSource();
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source(
self.nativeVideoTrack->GetSource());
if (source) {
_source = [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self.factory
nativeVideoSource:source.get()];
nativeVideoSource:source];
}
}
return _source;
@ -107,7 +107,8 @@
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
return static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get());
return rtc::scoped_refptr<webrtc::VideoTrackInterface>(
static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get()));
}
@end

View File

@ -20,7 +20,7 @@ namespace webrtc {
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(bool bypass_voice_processing) {
RTC_DLOG(LS_INFO) << __FUNCTION__;
#if defined(WEBRTC_IOS)
return new rtc::RefCountedObject<ios_adm::AudioDeviceModuleIOS>(bypass_voice_processing);
return rtc::make_ref_counted<ios_adm::AudioDeviceModuleIOS>(bypass_voice_processing);
#else
RTC_LOG(LS_ERROR) << "current platform is not supported => this module will self destruct!";
return nullptr;

View File

@ -22,8 +22,8 @@ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
rtc::Thread *signaling_thread,
rtc::Thread *worker_thread) {
RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init];
rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objc_video_track_source(
new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>(adapter));
rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objc_video_track_source =
rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(adapter);
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source =
webrtc::CreateVideoTrackSourceProxy(signaling_thread, worker_thread, objc_video_track_source);

View File

@ -16,7 +16,7 @@ namespace webrtc {
rtc::scoped_refptr<VideoFrameBuffer> ObjCToNativeVideoFrameBuffer(
id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_video_frame_buffer) {
return new rtc::RefCountedObject<ObjCFrameBuffer>(objc_video_frame_buffer);
return rtc::make_ref_counted<ObjCFrameBuffer>(objc_video_frame_buffer);
}
id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> NativeToObjCVideoFrameBuffer(

View File

@ -67,7 +67,7 @@ int ObjCFrameBuffer::height() const {
rtc::scoped_refptr<I420BufferInterface> ObjCFrameBuffer::ToI420() {
rtc::scoped_refptr<I420BufferInterface> buffer =
new rtc::RefCountedObject<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
rtc::make_ref_counted<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
return buffer;
}

View File

@ -57,7 +57,7 @@ class ObjCVideoDecoder : public VideoDecoder {
int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override {
[decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
const rtc::scoped_refptr<VideoFrameBuffer> buffer =
new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
VideoFrame videoFrame =
VideoFrame::Builder()
.set_video_frame_buffer(buffer)

View File

@ -91,12 +91,12 @@ void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame)
rtc::scoped_refptr<VideoFrameBuffer> buffer;
if (adapted_width == frame.width && adapted_height == frame.height) {
// No adaption - optimized path.
buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
} else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
// Adapted CVPixelBuffer frame.
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
buffer = rtc::make_ref_counted<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
adaptedWidth:adapted_width
adaptedHeight:adapted_height
@ -108,7 +108,7 @@ void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame)
// Adapted I420 frame.
// TODO(magjed): Optimize this I420 path.
rtc::scoped_refptr<I420Buffer> i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
buffer = i420_buffer;
}

View File

@ -51,7 +51,7 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
}
- (void)setUp {
_video_source = new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>();
_video_source = rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>();
}
- (void)tearDown {

View File

@ -83,7 +83,7 @@ TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsOKOnSuccess) {
CVPixelBufferRef pixel_buffer;
CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer)
@ -101,7 +101,7 @@ TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsErrorOnFail) {
CVPixelBufferRef pixel_buffer;
CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer)