Update iOS AppRTC to use PeerConnection Unified Plan

This also changes AppRTC to use addTrack instead of addStream and to
set up "early media" using the RtpTransceiver API.

Bug: webrtc:8870
Change-Id: Ie2848a87c71a95adb785367d822c61e1f753d8c6
Reviewed-on: https://webrtc-review.googlesource.com/56440
Commit-Queue: Steve Anton <steveanton@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Reviewed-by: Taylor Brandstetter <deadbeef@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22255}
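
For context, a minimal sketch of the send-side pattern this change adopts. It uses only
calls that appear in the diff below; the _factory/_peerConnection-style names and the
kARDAudioTrackId/kARDMediaStreamId constants mirror ARDAppClient and are shown purely
for illustration, not as the complete AppRTC flow.

  // Opt the peer connection into Unified Plan semantics.
  RTCConfiguration *config = [[RTCConfiguration alloc] init];
  config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
  RTCPeerConnection *peerConnection =
      [_factory peerConnectionWithConfiguration:config
                                    constraints:[self defaultPeerConnectionConstraints]
                                       delegate:self];

  // With Unified Plan, local tracks are attached directly with addTrack:streamLabels:
  // instead of being grouped into an RTCMediaStream and passed to addStream:.
  RTCAudioSource *source =
      [_factory audioSourceWithConstraints:[self defaultMediaAudioConstraints]];
  RTCAudioTrack *audioTrack = [_factory audioTrackWithSource:source
                                                     trackId:kARDAudioTrackId];
  [peerConnection addTrack:audioTrack streamLabels:@[ kARDMediaStreamId ]];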

@@ -21,6 +21,7 @@
 #import "WebRTC/RTCMediaStream.h"
 #import "WebRTC/RTCPeerConnectionFactory.h"
 #import "WebRTC/RTCRtpSender.h"
+#import "WebRTC/RTCRtpTransceiver.h"
 #import "WebRTC/RTCTracing.h"
 #import "WebRTC/RTCVideoCodecFactory.h"
 #import "WebRTC/RTCVideoSource.h"
@@ -371,15 +372,15 @@ static int const kKbpsMultiplier = 1000;
 - (void)peerConnection:(RTCPeerConnection *)peerConnection
           didAddStream:(RTCMediaStream *)stream {
-  dispatch_async(dispatch_get_main_queue(), ^{
-    RTCLog(@"Received %lu video tracks and %lu audio tracks",
-           (unsigned long)stream.videoTracks.count,
-           (unsigned long)stream.audioTracks.count);
-    if (stream.videoTracks.count) {
-      RTCVideoTrack *videoTrack = stream.videoTracks[0];
-      [_delegate appClient:self didReceiveRemoteVideoTrack:videoTrack];
-    }
-  });
+  RTCLog(@"Stream with %lu video tracks and %lu audio tracks was added.",
+         (unsigned long)stream.videoTracks.count,
+         (unsigned long)stream.audioTracks.count);
+}
+
+- (void)peerConnection:(RTCPeerConnection *)peerConnection
+    didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver {
+  RTCMediaStreamTrack *track = transceiver.receiver.track;
+  RTCLog(@"Now receiving %@ on track %@.", track.kind, track.trackId);
 }
 
 - (void)peerConnection:(RTCPeerConnection *)peerConnection
@@ -530,6 +531,7 @@ static int const kKbpsMultiplier = 1000;
   RTCMediaConstraints *constraints = [self defaultPeerConnectionConstraints];
   RTCConfiguration *config = [[RTCConfiguration alloc] init];
   config.iceServers = _iceServers;
+  config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
   _peerConnection = [_factory peerConnectionWithConfiguration:config
                                                    constraints:constraints
                                                       delegate:self];
@@ -676,18 +678,30 @@ static int const kKbpsMultiplier = 1000;
   [sender setParameters:parametersToModify];
 }
 
+- (RTCRtpTransceiver *)videoTransceiver {
+  for (RTCRtpTransceiver *transceiver in _peerConnection.transceivers) {
+    if (transceiver.mediaType == RTCRtpMediaTypeVideo) {
+      return transceiver;
+    }
+  }
+  return nil;
+}
+
 - (void)createMediaSenders {
   RTCMediaConstraints *constraints = [self defaultMediaAudioConstraints];
   RTCAudioSource *source = [_factory audioSourceWithConstraints:constraints];
   RTCAudioTrack *track = [_factory audioTrackWithSource:source
                                                 trackId:kARDAudioTrackId];
-  RTCMediaStream *stream = [_factory mediaStreamWithStreamId:kARDMediaStreamId];
-  [stream addAudioTrack:track];
+  [_peerConnection addTrack:track streamLabels:@[ kARDMediaStreamId ]];
   _localVideoTrack = [self createLocalVideoTrack];
   if (_localVideoTrack) {
-    [stream addVideoTrack:_localVideoTrack];
+    [_peerConnection addTrack:_localVideoTrack streamLabels:@[ kARDMediaStreamId ]];
+    // We can set up rendering for the remote track right away since the transceiver already has an
+    // RTCRtpReceiver with a track. The track will automatically get unmuted and produce frames
+    // once RTP is received.
+    RTCVideoTrack *track = (RTCVideoTrack *)([self videoTransceiver].receiver.track);
+    [_delegate appClient:self didReceiveRemoteVideoTrack:track];
   }
-  [_peerConnection addStream:stream];
 }
 
 - (RTCVideoTrack *)createLocalVideoTrack {
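
Usage note (not part of this change): one way a delegate might consume the early-media
callback shown above. The ARDAppClient name matches the class being edited, but the
remoteVideoView property and the RTCEAGLVideoView renderer behind it are assumptions
made only for this sketch.

  // The remote RTCVideoTrack is handed over before any RTP arrives ("early media").
  // Attaching a renderer immediately is safe; the track stays blank until it unmutes
  // and starts producing frames.
  - (void)appClient:(ARDAppClient *)client
      didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
    dispatch_async(dispatch_get_main_queue(), ^{
      // remoteVideoView: hypothetical RTCEAGLVideoView owned by this view controller.
      [remoteVideoTrack addRenderer:self.remoteVideoView];
    });
  }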