Add proxy access to some methods in Obj-C SDK

Most calls to the C++ PeerConnection and related classes are proxied
to WebRTC's internal threads. The Obj-C SDK has no equivalent
mechanism, so it would be useful to proxy its methods as well.

RTCMediaStream and RTCVideoTrack have NSMutableArray members that can
throw NSRangeException when accessed concurrently, so they are a good
starting point.

Also, remove some NSAsserts whose conditions are not fatal errors and
which have no effect in production builds anyway.
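
For context, every proxied method in this CL follows the same shape: if the
caller is not already on the thread that owns the mutable state, the call is
re-dispatched onto that thread with rtc::Thread::Invoke and re-enters itself.
A minimal sketch of that pattern (the class and getter here are hypothetical;
the member names mirror the diff below):

- (NSArray *)tracks {
  if (!_signalingThread->IsCurrent()) {
    // Block the calling thread until the signaling thread has produced the result.
    return _signalingThread->Invoke<NSArray *>(
        RTC_FROM_HERE, [self]() { return self.tracks; });
  }
  // Safe: _tracks is only ever mutated on _signalingThread, so copying here
  // cannot race a concurrent add/remove.
  return [_tracks copy];
}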

Bug: None
Change-Id: I10b44a9c773d62a5c04c254986733a6b67d51617
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/262840
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Commit-Queue: Daniel.L (Byoungchan) Lee <daniel.l@hpcnt.com>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#37283}
Byoungchan Lee
2022-06-21 09:19:34 +09:00
committed by WebRTC LUCI CQ
parent 4d1ef54653
commit 2b46a5870b
5 changed files with 67 additions and 14 deletions

View File

@@ -1044,6 +1044,7 @@ if (is_ios || is_mac) {
       "../rtc_base:network_constants",
       "../rtc_base:safe_conversions",
       "../rtc_base:stringutils",
+      "../rtc_base:threading",
       "../rtc_base:timeutils",
       "../stats:rtc_stats",
       "../system_wrappers:field_trial",

View File

@@ -10,8 +10,6 @@
 #import "RTCMediaStream+Private.h"
 
-#include <vector>
-
 #import "RTCAudioTrack+Private.h"
 #import "RTCMediaStreamTrack+Private.h"
 #import "RTCPeerConnectionFactory+Private.h"
@@ -20,8 +18,9 @@
 @implementation RTC_OBJC_TYPE (RTCMediaStream) {
   RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
-  NSMutableArray *_audioTracks;
-  NSMutableArray *_videoTracks;
+  rtc::Thread *_signalingThread;
+  NSMutableArray *_audioTracks /* accessed on _signalingThread */;
+  NSMutableArray *_videoTracks /* accessed on _signalingThread */;
   rtc::scoped_refptr<webrtc::MediaStreamInterface> _nativeMediaStream;
 }
@@ -36,10 +35,18 @@
 }
 
 - (NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *)audioTracks {
+  if (!_signalingThread->IsCurrent()) {
+    return _signalingThread->Invoke<NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *>(
+        RTC_FROM_HERE, [self]() { return self.audioTracks; });
+  }
   return [_audioTracks copy];
 }
 
 - (NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *)videoTracks {
+  if (!_signalingThread->IsCurrent()) {
+    return _signalingThread->Invoke<NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *>(
+        RTC_FROM_HERE, [self]() { return self.videoTracks; });
+  }
   return [_videoTracks copy];
 }
@@ -48,33 +55,52 @@
 }
 
 - (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack {
+  if (!_signalingThread->IsCurrent()) {
+    return _signalingThread->Invoke<void>(
+        RTC_FROM_HERE, [audioTrack, self]() { return [self addAudioTrack:audioTrack]; });
+  }
   if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) {
     [_audioTracks addObject:audioTrack];
   }
 }
 
 - (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack {
+  if (!_signalingThread->IsCurrent()) {
+    return _signalingThread->Invoke<void>(
+        RTC_FROM_HERE, [videoTrack, self]() { return [self addVideoTrack:videoTrack]; });
+  }
   if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) {
     [_videoTracks addObject:videoTrack];
   }
 }
 
 - (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack {
+  if (!_signalingThread->IsCurrent()) {
+    return _signalingThread->Invoke<void>(
+        RTC_FROM_HERE, [audioTrack, self]() { return [self removeAudioTrack:audioTrack]; });
+  }
   NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack];
-  NSAssert(index != NSNotFound,
-           @"|removeAudioTrack| called on unexpected RTC_OBJC_TYPE(RTCAudioTrack)");
-  if (index != NSNotFound &&
-      _nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
+  if (index == NSNotFound) {
+    RTC_LOG(LS_INFO) << "|removeAudioTrack| called on unexpected RTC_OBJC_TYPE(RTCAudioTrack)";
+    return;
+  }
+  if (_nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
     [_audioTracks removeObjectAtIndex:index];
   }
 }
 
 - (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack {
+  if (!_signalingThread->IsCurrent()) {
+    return _signalingThread->Invoke<void>(
+        RTC_FROM_HERE, [videoTrack, self]() { return [self removeVideoTrack:videoTrack]; });
+  }
   NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack];
-  NSAssert(index != NSNotFound,
-           @"|removeVideoTrack| called on unexpected RTC_OBJC_TYPE(RTCVideoTrack)");
-  if (index != NSNotFound &&
-      _nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
+  if (index == NSNotFound) {
+    RTC_LOG(LS_INFO) << "|removeVideoTrack| called on unexpected RTC_OBJC_TYPE(RTCVideoTrack)";
+    return;
+  }
+  if (_nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
     [_videoTracks removeObjectAtIndex:index];
   }
 }
@@ -98,6 +124,7 @@
   NSParameterAssert(nativeMediaStream);
   if (self = [super init]) {
     _factory = factory;
+    _signalingThread = factory.signalingThread;
 
     webrtc::AudioTrackVector audioTracks = nativeMediaStream->GetAudioTracks();
     webrtc::VideoTrackVector videoTracks = nativeMediaStream->GetVideoTracks();
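
A note on what this buys callers: the getters and mutators above now funnel
every access to _audioTracks/_videoTracks through the signaling thread, so an
off-thread caller simply blocks until its Invoke completes. A hypothetical
caller (the factory call and names are illustrative, not part of this diff):

// Safe from the main thread or any other thread: the returned array is a
// snapshot copied on the signaling thread, so enumerating it cannot race a
// concurrent addAudioTrack:/removeAudioTrack:.
RTC_OBJC_TYPE(RTCMediaStream) *stream = [factory mediaStreamWithStreamId:@"stream0"];
NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *tracks = stream.audioTracks;
for (RTC_OBJC_TYPE(RTCAudioTrack) *track in tracks) {
  NSLog(@"audio track: %@", track.trackId);
}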

View File

@@ -12,6 +12,7 @@
 #include "api/peer_connection_interface.h"
 #include "api/scoped_refptr.h"
+#include "rtc_base/thread.h"
 
 NS_ASSUME_NONNULL_BEGIN
@@ -26,6 +27,9 @@ NS_ASSUME_NONNULL_BEGIN
 @property(nonatomic,
           readonly) rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> nativeFactory;
+
+@property(nonatomic, readonly) rtc::Thread* signalingThread;
+@property(nonatomic, readonly) rtc::Thread* workerThread;
 
 @end
 
 NS_ASSUME_NONNULL_END

View File

@@ -331,4 +331,12 @@
   _hasStartedAecDump = NO;
 }
 
+- (rtc::Thread *)signalingThread {
+  return _signalingThread.get();
+}
+
+- (rtc::Thread *)workerThread {
+  return _workerThread.get();
+}
+
 @end
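
The .get() calls above suggest the factory already owns its threads as
std::unique_ptr<rtc::Thread> members; the new accessors just hand the raw
pointers to the other Obj-C wrappers. A rough sketch of how such members are
typically set up (an assumption about the existing factory initializer, not
part of this diff):

// Assumed setup in the factory initializer: create, name, and start the
// threads that the new signalingThread/workerThread accessors expose.
_signalingThread = rtc::Thread::Create();
_signalingThread->SetName("signaling_thread", nullptr);
_signalingThread->Start();
_workerThread = rtc::Thread::Create();
_workerThread->SetName("worker_thread", nullptr);
_workerThread->Start();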

View File

@@ -17,7 +17,8 @@
 #import "helpers/NSString+StdString.h"
 
 @implementation RTC_OBJC_TYPE (RTCVideoTrack) {
-  NSMutableArray *_adapters;
+  rtc::Thread *_workerThread;
+  NSMutableArray *_adapters /* accessed on _workerThread */;
 }
 
 @synthesize source = _source;
@@ -46,6 +47,7 @@
   NSParameterAssert(type == RTCMediaStreamTrackTypeVideo);
   if (self = [super initWithFactory:factory nativeTrack:nativeMediaTrack type:type]) {
     _adapters = [NSMutableArray array];
+    _workerThread = factory.workerThread;
   }
   return self;
 }
@@ -69,10 +71,15 @@
 }
 
 - (void)addRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer {
+  if (!_workerThread->IsCurrent()) {
+    _workerThread->Invoke<void>(RTC_FROM_HERE, [renderer, self] { [self addRenderer:renderer]; });
+    return;
+  }
+
   // Make sure we don't have this renderer yet.
   for (RTCVideoRendererAdapter *adapter in _adapters) {
     if (adapter.videoRenderer == renderer) {
-      NSAssert(NO, @"|renderer| is already attached to this track");
+      RTC_LOG(LS_INFO) << "|renderer| is already attached to this track";
       return;
     }
   }
@@ -85,6 +92,11 @@
 }
 
 - (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer {
+  if (!_workerThread->IsCurrent()) {
+    _workerThread->Invoke<void>(RTC_FROM_HERE,
+                                [renderer, self] { [self removeRenderer:renderer]; });
+    return;
+  }
   __block NSUInteger indexToRemove = NSNotFound;
   [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter,
                                           NSUInteger idx,
@@ -95,6 +107,7 @@
     }
   }];
   if (indexToRemove == NSNotFound) {
+    RTC_LOG(LS_INFO) << "removeRenderer called with a renderer that has not been previously added";
     return;
   }
   RTCVideoRendererAdapter *adapterToRemove =