TrackMediaInfoMap added.

This maps, in both directions, [Audio/Video]TrackInterface to
[Voice/Video][Sender/Receiver]Info.

This mapping is necessary for RTCStatsCollector to know the relationship
between RTCMediaStreamTrackStats and RTC[In/Out]boundRTPStreamStats, and
to be able to collect several of the RTCMediaStreamTrackStats members.
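
As a rough sketch of the intended use (illustrative only; the helper
|ExampleLookup| and its arguments are hypothetical and not part of this CL),
a stats collector can go from a track to its infos and back again:

  // Hypothetical helper showing both lookup directions of the map.
  void ExampleLookup(const TrackMediaInfoMap& map,
                     const AudioTrackInterface& local_audio_track) {
    // Track -> info(s): stats for every sender this track is attached to.
    const std::vector<cricket::VoiceSenderInfo*>* sender_infos =
        map.GetVoiceSenderInfos(local_audio_track);
    if (!sender_infos)
      return;  // The track is not attached to a sender with a known SSRC.
    for (const cricket::VoiceSenderInfo* info : *sender_infos) {
      // Info -> track: the inverse lookup yields the same track again.
      RTC_DCHECK_EQ(map.GetAudioTrack(*info).get(), &local_audio_track);
    }
  }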

BUG=webrtc:6757, chromium:659137, chromium:657854, chromium:627816

Review-Url: https://codereview.webrtc.org/2611983002
Cr-Commit-Position: refs/heads/master@{#16090}
Author: hbos
Date: 2017-01-16 04:24:10 -08:00
Committed by: Commit bot
Commit: 1f8239ca6f
Parent: be02dcdc4f
6 changed files with 755 additions and 0 deletions

webrtc/api/BUILD.gn

@@ -101,6 +101,8 @@ rtc_static_library("libjingle_peerconnection") {
"statstypes.cc",
"statstypes.h",
"streamcollection.h",
"trackmediainfomap.cc",
"trackmediainfomap.h",
"videocapturertracksource.cc",
"videocapturertracksource.h",
"videosourceproxy.h",
@@ -254,12 +256,15 @@ if (rtc_include_tests) {
"test/fakevideotrackrenderer.h",
"test/mock_datachannel.h",
"test/mock_peerconnection.h",
"test/mock_rtpreceiver.h",
"test/mock_rtpsender.h",
"test/mock_webrtcsession.h",
"test/mockpeerconnectionobservers.h",
"test/peerconnectiontestwrapper.cc",
"test/peerconnectiontestwrapper.h",
"test/rtcstatsobtainer.h",
"test/testsdpstrings.h",
"trackmediainfomap_unittest.cc",
"videocapturertracksource_unittest.cc",
"videotrack_unittest.cc",
"webrtcsdp_unittest.cc",

webrtc/api/test/mock_rtpreceiver.h (new file)

@@ -0,0 +1,34 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_API_TEST_MOCK_RTPRECEIVER_H_
#define WEBRTC_API_TEST_MOCK_RTPRECEIVER_H_
#include <string>
#include "webrtc/api/rtpreceiverinterface.h"
#include "webrtc/test/gmock.h"
namespace webrtc {
class MockRtpReceiver : public rtc::RefCountedObject<RtpReceiverInterface> {
public:
MOCK_METHOD1(SetTrack, void(MediaStreamTrackInterface*));
MOCK_CONST_METHOD0(track, rtc::scoped_refptr<MediaStreamTrackInterface>());
MOCK_CONST_METHOD0(media_type, cricket::MediaType());
MOCK_CONST_METHOD0(id, std::string());
MOCK_CONST_METHOD0(GetParameters, RtpParameters());
MOCK_METHOD1(SetParameters, bool(const RtpParameters&));
MOCK_METHOD1(SetObserver, void(RtpReceiverObserverInterface*));
};
} // namespace webrtc
#endif // WEBRTC_API_TEST_MOCK_RTPRECEIVER_H_

webrtc/api/test/mock_rtpsender.h (new file)

@@ -0,0 +1,36 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_API_TEST_MOCK_RTPSENDER_H_
#define WEBRTC_API_TEST_MOCK_RTPSENDER_H_
#include <string>
#include <vector>
#include "webrtc/api/rtpsenderinterface.h"
#include "webrtc/test/gmock.h"
namespace webrtc {
class MockRtpSender : public rtc::RefCountedObject<RtpSenderInterface> {
public:
MOCK_METHOD1(SetTrack, bool(MediaStreamTrackInterface*));
MOCK_CONST_METHOD0(track, rtc::scoped_refptr<MediaStreamTrackInterface>());
MOCK_CONST_METHOD0(ssrc, uint32_t());
MOCK_CONST_METHOD0(media_type, cricket::MediaType());
MOCK_CONST_METHOD0(id, std::string());
MOCK_CONST_METHOD0(stream_ids, std::vector<std::string>());
MOCK_CONST_METHOD0(GetParameters, RtpParameters());
MOCK_METHOD1(SetParameters, bool(const RtpParameters&));
};
} // namespace webrtc
#endif // WEBRTC_API_TEST_MOCK_RTPSENDER_H_

webrtc/api/trackmediainfomap.cc (new file)

@@ -0,0 +1,201 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/trackmediainfomap.h"
#include <utility>
namespace webrtc {
namespace {
template<typename K, typename V>
V FindValueOrNull(const std::map<K, V>& map, const K& key) {
auto it = map.find(key);
return (it != map.end()) ? it->second : nullptr;
}
template<typename K, typename V>
const V* FindAddressOrNull(const std::map<K, V>& map, const K& key) {
auto it = map.find(key);
return (it != map.end()) ? &it->second : nullptr;
}
void GetAudioAndVideoTrackBySsrc(
const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& rtp_senders,
const std::vector<rtc::scoped_refptr<RtpReceiverInterface>>& rtp_receivers,
std::map<uint32_t, AudioTrackInterface*>* audio_track_by_ssrc,
std::map<uint32_t, VideoTrackInterface*>* video_track_by_ssrc) {
RTC_DCHECK(audio_track_by_ssrc->empty());
RTC_DCHECK(video_track_by_ssrc->empty());
// TODO(hbos): RTP senders/receivers use a proxy to the signaling thread, and
// our sender/receiver implementations invoke on the worker thread. (This means
// one thread jump if on the signaling thread, and two thread jumps if on any
// other thread.) Is there a way to avoid thread jump(s) on a per
// sender/receiver, per method basis?
for (const rtc::scoped_refptr<RtpSenderInterface>& rtp_sender : rtp_senders) {
cricket::MediaType media_type = rtp_sender->media_type();
MediaStreamTrackInterface* track = rtp_sender->track();
if (!track) {
continue;
}
RTC_DCHECK_EQ(track->kind(),
media_type == cricket::MEDIA_TYPE_AUDIO
? MediaStreamTrackInterface::kAudioKind
: MediaStreamTrackInterface::kVideoKind);
// TODO(deadbeef): |ssrc| should be removed in favor of |GetParameters|.
uint32_t ssrc = rtp_sender->ssrc();
if (ssrc != 0) {
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
RTC_DCHECK(audio_track_by_ssrc->find(ssrc) ==
audio_track_by_ssrc->end());
(*audio_track_by_ssrc)[ssrc] = static_cast<AudioTrackInterface*>(track);
} else {
RTC_DCHECK(video_track_by_ssrc->find(ssrc) ==
video_track_by_ssrc->end());
(*video_track_by_ssrc)[ssrc] = static_cast<VideoTrackInterface*>(track);
}
}
}
for (const rtc::scoped_refptr<RtpReceiverInterface>& rtp_receiver :
rtp_receivers) {
cricket::MediaType media_type = rtp_receiver->media_type();
MediaStreamTrackInterface* track = rtp_receiver->track();
RTC_DCHECK(track);
RTC_DCHECK_EQ(track->kind(),
media_type == cricket::MEDIA_TYPE_AUDIO
? MediaStreamTrackInterface::kAudioKind
: MediaStreamTrackInterface::kVideoKind);
RtpParameters params = rtp_receiver->GetParameters();
for (const RtpEncodingParameters& encoding : params.encodings) {
if (!encoding.ssrc) {
continue;
}
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
RTC_DCHECK(audio_track_by_ssrc->find(*encoding.ssrc) ==
audio_track_by_ssrc->end());
(*audio_track_by_ssrc)[*encoding.ssrc] =
static_cast<AudioTrackInterface*>(track);
} else {
RTC_DCHECK(video_track_by_ssrc->find(*encoding.ssrc) ==
video_track_by_ssrc->end());
(*video_track_by_ssrc)[*encoding.ssrc] =
static_cast<VideoTrackInterface*>(track);
}
}
}
}
} // namespace
TrackMediaInfoMap::TrackMediaInfoMap(
std::unique_ptr<cricket::VoiceMediaInfo> voice_media_info,
std::unique_ptr<cricket::VideoMediaInfo> video_media_info,
const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& rtp_senders,
const std::vector<rtc::scoped_refptr<RtpReceiverInterface>>& rtp_receivers)
: voice_media_info_(std::move(voice_media_info)),
video_media_info_(std::move(video_media_info)) {
std::map<uint32_t, AudioTrackInterface*> audio_track_by_ssrc;
std::map<uint32_t, VideoTrackInterface*> video_track_by_ssrc;
GetAudioAndVideoTrackBySsrc(
rtp_senders, rtp_receivers, &audio_track_by_ssrc, &video_track_by_ssrc);
if (voice_media_info_) {
for (auto& sender_info : voice_media_info_->senders) {
AudioTrackInterface* associated_track =
FindValueOrNull(audio_track_by_ssrc, sender_info.ssrc());
if (associated_track) {
// One sender is associated with at most one track.
// One track may be associated with multiple senders.
audio_track_by_sender_info_[&sender_info] = associated_track;
voice_infos_by_local_track_[associated_track].push_back(&sender_info);
}
}
for (auto& receiver_info : voice_media_info_->receivers) {
AudioTrackInterface* associated_track =
FindValueOrNull(audio_track_by_ssrc, receiver_info.ssrc());
if (associated_track) {
// One receiver is associated with at most one track, which is uniquely
// associated with that receiver.
audio_track_by_receiver_info_[&receiver_info] = associated_track;
RTC_DCHECK(voice_info_by_remote_track_.find(associated_track) ==
voice_info_by_remote_track_.end());
voice_info_by_remote_track_[associated_track] = &receiver_info;
}
}
}
if (video_media_info_) {
for (auto& sender_info : video_media_info_->senders) {
VideoTrackInterface* associated_track =
FindValueOrNull(video_track_by_ssrc, sender_info.ssrc());
if (associated_track) {
// One sender is associated with at most one track.
// One track may be associated with multiple senders.
video_track_by_sender_info_[&sender_info] = associated_track;
video_infos_by_local_track_[associated_track].push_back(&sender_info);
}
}
for (auto& receiver_info : video_media_info_->receivers) {
VideoTrackInterface* associated_track =
FindValueOrNull(video_track_by_ssrc, receiver_info.ssrc());
if (associated_track) {
// One receiver is associated with at most one track, which is uniquely
// associated with that receiver.
video_track_by_receiver_info_[&receiver_info] = associated_track;
RTC_DCHECK(video_info_by_remote_track_.find(associated_track) ==
video_info_by_remote_track_.end());
video_info_by_remote_track_[associated_track] = &receiver_info;
}
}
}
}
const std::vector<cricket::VoiceSenderInfo*>*
TrackMediaInfoMap::GetVoiceSenderInfos(
const AudioTrackInterface& local_audio_track) const {
return FindAddressOrNull(voice_infos_by_local_track_, &local_audio_track);
}
const cricket::VoiceReceiverInfo* TrackMediaInfoMap::GetVoiceReceiverInfo(
const AudioTrackInterface& remote_audio_track) const {
return FindValueOrNull(voice_info_by_remote_track_, &remote_audio_track);
}
const std::vector<cricket::VideoSenderInfo*>*
TrackMediaInfoMap::GetVideoSenderInfos(
const VideoTrackInterface& local_video_track) const {
return FindAddressOrNull(video_infos_by_local_track_, &local_video_track);
}
const cricket::VideoReceiverInfo* TrackMediaInfoMap::GetVideoReceiverInfo(
const VideoTrackInterface& remote_video_track) const {
return FindValueOrNull(video_info_by_remote_track_, &remote_video_track);
}
rtc::scoped_refptr<AudioTrackInterface> TrackMediaInfoMap::GetAudioTrack(
const cricket::VoiceSenderInfo& voice_sender_info) const {
return FindValueOrNull(audio_track_by_sender_info_, &voice_sender_info);
}
rtc::scoped_refptr<AudioTrackInterface> TrackMediaInfoMap::GetAudioTrack(
const cricket::VoiceReceiverInfo& voice_receiver_info) const {
return FindValueOrNull(audio_track_by_receiver_info_, &voice_receiver_info);
}
rtc::scoped_refptr<VideoTrackInterface> TrackMediaInfoMap::GetVideoTrack(
const cricket::VideoSenderInfo& video_sender_info) const {
return FindValueOrNull(video_track_by_sender_info_, &video_sender_info);
}
rtc::scoped_refptr<VideoTrackInterface> TrackMediaInfoMap::GetVideoTrack(
const cricket::VideoReceiverInfo& video_receiver_info) const {
return FindValueOrNull(video_track_by_receiver_info_, &video_receiver_info);
}
} // namespace webrtc

webrtc/api/trackmediainfomap.h (new file)

@@ -0,0 +1,98 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_API_TRACKMEDIAINFOMAP_H_
#define WEBRTC_API_TRACKMEDIAINFOMAP_H_
#include <map>
#include <memory>
#include <vector>
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/rtpreceiverinterface.h"
#include "webrtc/api/rtpsenderinterface.h"
#include "webrtc/base/refcount.h"
#include "webrtc/media/base/mediachannel.h"
namespace webrtc {
// Audio/video tracks and sender/receiver statistical information are associated
// with each other based on attachments to RTP senders/receivers. This class
// maps that relationship, in both directions, so that stats about a track can
// be retrieved on a per-attachment basis.
//
// An RTP sender/receiver sends or receives media for a set of SSRCs. The media
// comes from an audio/video track that is attached to it.
// |[Voice/Video][Sender/Receiver]Info| has statistical information for a set of
// SSRCs. This class inspects the RTP senders and receivers to uncover the
// track <-> info relationships.
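//
// A rough usage sketch (illustrative comment only; |voice_media_info|,
// |video_media_info|, |rtp_senders|, |rtp_receivers| and |remote_audio_track|
// are assumed to be provided by the caller, e.g. the stats collection code):
//
//   TrackMediaInfoMap map(std::move(voice_media_info),
//                         std::move(video_media_info),
//                         rtp_senders, rtp_receivers);
//   // Track -> info lookup.
//   const cricket::VoiceReceiverInfo* receiver_info =
//       map.GetVoiceReceiverInfo(*remote_audio_track);
//   // Info -> track lookup (the inverse direction).
//   rtc::scoped_refptr<AudioTrackInterface> track =
//       map.GetAudioTrack(map.voice_media_info()->receivers[0]);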
class TrackMediaInfoMap {
public:
TrackMediaInfoMap(
std::unique_ptr<cricket::VoiceMediaInfo> voice_media_info,
std::unique_ptr<cricket::VideoMediaInfo> video_media_info,
const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& rtp_senders,
const std::vector<rtc::scoped_refptr<RtpReceiverInterface>>&
rtp_receivers);
const cricket::VoiceMediaInfo* voice_media_info() const {
return voice_media_info_.get();
}
const cricket::VideoMediaInfo* video_media_info() const {
return video_media_info_.get();
}
const std::vector<cricket::VoiceSenderInfo*>* GetVoiceSenderInfos(
const AudioTrackInterface& local_audio_track) const;
const cricket::VoiceReceiverInfo* GetVoiceReceiverInfo(
const AudioTrackInterface& remote_audio_track) const;
const std::vector<cricket::VideoSenderInfo*>* GetVideoSenderInfos(
const VideoTrackInterface& local_video_track) const;
const cricket::VideoReceiverInfo* GetVideoReceiverInfo(
const VideoTrackInterface& remote_video_track) const;
rtc::scoped_refptr<AudioTrackInterface> GetAudioTrack(
const cricket::VoiceSenderInfo& voice_sender_info) const;
rtc::scoped_refptr<AudioTrackInterface> GetAudioTrack(
const cricket::VoiceReceiverInfo& voice_receiver_info) const;
rtc::scoped_refptr<VideoTrackInterface> GetVideoTrack(
const cricket::VideoSenderInfo& video_sender_info) const;
rtc::scoped_refptr<VideoTrackInterface> GetVideoTrack(
const cricket::VideoReceiverInfo& video_receiver_info) const;
private:
std::unique_ptr<cricket::VoiceMediaInfo> voice_media_info_;
std::unique_ptr<cricket::VideoMediaInfo> video_media_info_;
// These maps map tracks (identified by a pointer) to their corresponding info
// object of the correct kind. One track can map to multiple info objects.
std::map<const AudioTrackInterface*, std::vector<cricket::VoiceSenderInfo*>>
voice_infos_by_local_track_;
std::map<const AudioTrackInterface*, cricket::VoiceReceiverInfo*>
voice_info_by_remote_track_;
std::map<const VideoTrackInterface*, std::vector<cricket::VideoSenderInfo*>>
video_infos_by_local_track_;
std::map<const VideoTrackInterface*, cricket::VideoReceiverInfo*>
video_info_by_remote_track_;
// These maps map info objects to their corresponding tracks. They are always
// the inverse of the maps above. One info object always maps to only one
// track.
std::map<const cricket::VoiceSenderInfo*,
rtc::scoped_refptr<AudioTrackInterface>> audio_track_by_sender_info_;
std::map<const cricket::VoiceReceiverInfo*,
rtc::scoped_refptr<AudioTrackInterface>> audio_track_by_receiver_info_;
std::map<const cricket::VideoSenderInfo*,
rtc::scoped_refptr<VideoTrackInterface>> video_track_by_sender_info_;
std::map<const cricket::VideoReceiverInfo*,
rtc::scoped_refptr<VideoTrackInterface>> video_track_by_receiver_info_;
};
} // namespace webrtc
#endif // WEBRTC_API_TRACKMEDIAINFOMAP_H_

webrtc/api/trackmediainfomap_unittest.cc (new file)

@@ -0,0 +1,381 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/trackmediainfomap.h"
#include <initializer_list>
#include <memory>
#include <utility>
#include <vector>
#include "webrtc/api/audiotrack.h"
#include "webrtc/api/rtpreceiverinterface.h"
#include "webrtc/api/rtpsenderinterface.h"
#include "webrtc/api/test/mock_rtpreceiver.h"
#include "webrtc/api/test/mock_rtpsender.h"
#include "webrtc/api/test/fakevideotracksource.h"
#include "webrtc/api/videotrack.h"
#include "webrtc/base/refcount.h"
#include "webrtc/media/base/mediachannel.h"
#include "webrtc/test/gtest.h"
namespace webrtc {
namespace {
RtpParameters CreateRtpParametersWithSsrcs(
std::initializer_list<uint32_t> ssrcs) {
RtpParameters params;
for (uint32_t ssrc : ssrcs) {
RtpEncodingParameters encoding_params;
encoding_params.ssrc = rtc::Optional<uint32_t>(ssrc);
params.encodings.push_back(encoding_params);
}
return params;
}
rtc::scoped_refptr<MockRtpSender> CreateMockRtpSender(
cricket::MediaType media_type, std::initializer_list<uint32_t> ssrcs,
rtc::scoped_refptr<MediaStreamTrackInterface> track) {
uint32_t first_ssrc;
if (ssrcs.size()) {
first_ssrc = *ssrcs.begin();
} else {
first_ssrc = 0;
}
rtc::scoped_refptr<MockRtpSender> sender(
new rtc::RefCountedObject<MockRtpSender>());
EXPECT_CALL(*sender, track()).WillRepeatedly(testing::Return(track));
EXPECT_CALL(*sender, ssrc()).WillRepeatedly(testing::Return(first_ssrc));
EXPECT_CALL(*sender, media_type()).WillRepeatedly(testing::Return(
media_type));
EXPECT_CALL(*sender, GetParameters()).WillRepeatedly(testing::Return(
CreateRtpParametersWithSsrcs(ssrcs)));
return sender;
}
rtc::scoped_refptr<MockRtpReceiver> CreateMockRtpReceiver(
cricket::MediaType media_type, std::initializer_list<uint32_t> ssrcs,
rtc::scoped_refptr<MediaStreamTrackInterface> track) {
rtc::scoped_refptr<MockRtpReceiver> receiver(
new rtc::RefCountedObject<MockRtpReceiver>());
EXPECT_CALL(*receiver, track()).WillRepeatedly(testing::Return(track));
EXPECT_CALL(*receiver, media_type()).WillRepeatedly(testing::Return(
media_type));
EXPECT_CALL(*receiver, GetParameters()).WillRepeatedly(testing::Return(
CreateRtpParametersWithSsrcs(ssrcs)));
return receiver;
}
class TrackMediaInfoMapTest : public testing::Test {
public:
TrackMediaInfoMapTest()
: voice_media_info_(new cricket::VoiceMediaInfo()),
video_media_info_(new cricket::VideoMediaInfo()),
local_audio_track_(AudioTrack::Create("LocalAudioTrack", nullptr)),
remote_audio_track_(AudioTrack::Create("RemoteAudioTrack", nullptr)),
local_video_track_(
VideoTrack::Create("LocalVideoTrack",
FakeVideoTrackSource::Create(false))),
remote_video_track_(
VideoTrack::Create("RemoteVideoTrack",
FakeVideoTrackSource::Create(false))) {
}
~TrackMediaInfoMapTest() {
// Ownership of the info objects is passed to the map by |CreateMap|; only
// delete them here if |CreateMap| has not been called.
if (!map_) {
delete voice_media_info_;
delete video_media_info_;
}
}
void AddRtpSenderWithSsrcs(std::initializer_list<uint32_t> ssrcs,
MediaStreamTrackInterface* local_track) {
rtc::scoped_refptr<MockRtpSender> rtp_sender = CreateMockRtpSender(
local_track->kind() == MediaStreamTrackInterface::kAudioKind ?
cricket::MEDIA_TYPE_AUDIO : cricket::MEDIA_TYPE_VIDEO,
ssrcs, local_track);
rtp_senders_.push_back(rtp_sender);
if (local_track->kind() == MediaStreamTrackInterface::kAudioKind) {
cricket::VoiceSenderInfo voice_sender_info;
size_t i = 0;
for (uint32_t ssrc : ssrcs) {
voice_sender_info.local_stats.push_back(cricket::SsrcSenderInfo());
voice_sender_info.local_stats[i++].ssrc = ssrc;
}
voice_media_info_->senders.push_back(voice_sender_info);
} else {
cricket::VideoSenderInfo video_sender_info;
size_t i = 0;
for (uint32_t ssrc : ssrcs) {
video_sender_info.local_stats.push_back(cricket::SsrcSenderInfo());
video_sender_info.local_stats[i++].ssrc = ssrc;
}
video_media_info_->senders.push_back(video_sender_info);
}
}
void AddRtpReceiverWithSsrcs(std::initializer_list<uint32_t> ssrcs,
MediaStreamTrackInterface* remote_track) {
rtc::scoped_refptr<MockRtpReceiver> rtp_receiver = CreateMockRtpReceiver(
remote_track->kind() == MediaStreamTrackInterface::kAudioKind ?
cricket::MEDIA_TYPE_AUDIO : cricket::MEDIA_TYPE_VIDEO,
ssrcs, remote_track);
rtp_receivers_.push_back(rtp_receiver);
if (remote_track->kind() == MediaStreamTrackInterface::kAudioKind) {
cricket::VoiceReceiverInfo voice_receiver_info;
size_t i = 0;
for (uint32_t ssrc : ssrcs) {
voice_receiver_info.local_stats.push_back(cricket::SsrcReceiverInfo());
voice_receiver_info.local_stats[i++].ssrc = ssrc;
}
voice_media_info_->receivers.push_back(voice_receiver_info);
} else {
cricket::VideoReceiverInfo video_receiver_info;
size_t i = 0;
for (uint32_t ssrc : ssrcs) {
video_receiver_info.local_stats.push_back(cricket::SsrcReceiverInfo());
video_receiver_info.local_stats[i++].ssrc = ssrc;
}
video_media_info_->receivers.push_back(video_receiver_info);
}
}
void CreateMap() {
RTC_DCHECK(!map_);
map_.reset(new TrackMediaInfoMap(
std::unique_ptr<cricket::VoiceMediaInfo>(voice_media_info_),
std::unique_ptr<cricket::VideoMediaInfo>(video_media_info_),
rtp_senders_,
rtp_receivers_));
}
protected:
cricket::VoiceMediaInfo* voice_media_info_;
cricket::VideoMediaInfo* video_media_info_;
std::vector<rtc::scoped_refptr<RtpSenderInterface>> rtp_senders_;
std::vector<rtc::scoped_refptr<RtpReceiverInterface>> rtp_receivers_;
std::unique_ptr<TrackMediaInfoMap> map_;
rtc::scoped_refptr<AudioTrack> local_audio_track_;
rtc::scoped_refptr<AudioTrack> remote_audio_track_;
rtc::scoped_refptr<VideoTrack> local_video_track_;
rtc::scoped_refptr<VideoTrack> remote_video_track_;
};
} // namespace
TEST_F(TrackMediaInfoMapTest, SingleSenderReceiverPerTrackWithOneSsrc) {
AddRtpSenderWithSsrcs({ 1 }, local_audio_track_);
AddRtpReceiverWithSsrcs({ 2 }, remote_audio_track_);
AddRtpSenderWithSsrcs({ 3 }, local_video_track_);
AddRtpReceiverWithSsrcs({ 4 }, remote_video_track_);
CreateMap();
// Local audio track <-> RTP audio sender
ASSERT_TRUE(map_->GetVoiceSenderInfos(*local_audio_track_));
EXPECT_EQ(*map_->GetVoiceSenderInfos(*local_audio_track_),
std::vector<cricket::VoiceSenderInfo*>({
&voice_media_info_->senders[0] }));
EXPECT_EQ(map_->GetAudioTrack(voice_media_info_->senders[0]),
local_audio_track_.get());
// Remote audio track <-> RTP audio receiver
EXPECT_EQ(map_->GetVoiceReceiverInfo(*remote_audio_track_),
&voice_media_info_->receivers[0]);
EXPECT_EQ(map_->GetAudioTrack(voice_media_info_->receivers[0]),
remote_audio_track_.get());
// Local video track <-> RTP video sender
ASSERT_TRUE(map_->GetVideoSenderInfos(*local_video_track_));
EXPECT_EQ(*map_->GetVideoSenderInfos(*local_video_track_),
std::vector<cricket::VideoSenderInfo*>({
&video_media_info_->senders[0] }));
EXPECT_EQ(map_->GetVideoTrack(video_media_info_->senders[0]),
local_video_track_.get());
// Remote video track <-> RTP video receiver
EXPECT_EQ(map_->GetVideoReceiverInfo(*remote_video_track_),
&video_media_info_->receivers[0]);
EXPECT_EQ(map_->GetVideoTrack(video_media_info_->receivers[0]),
remote_video_track_.get());
}
TEST_F(TrackMediaInfoMapTest, SingleSenderReceiverPerTrackWithMissingSsrc) {
AddRtpSenderWithSsrcs({}, local_audio_track_);
AddRtpSenderWithSsrcs({}, local_video_track_);
AddRtpReceiverWithSsrcs({}, remote_audio_track_);
AddRtpReceiverWithSsrcs({}, remote_video_track_);
CreateMap();
EXPECT_FALSE(map_->GetVoiceSenderInfos(*local_audio_track_));
EXPECT_FALSE(map_->GetVideoSenderInfos(*local_video_track_));
EXPECT_FALSE(map_->GetVoiceReceiverInfo(*remote_audio_track_));
EXPECT_FALSE(map_->GetVideoReceiverInfo(*remote_video_track_));
}
TEST_F(TrackMediaInfoMapTest,
SingleSenderReceiverPerTrackWithAudioAndVideoUseSameSsrc) {
AddRtpSenderWithSsrcs({ 1 }, local_audio_track_);
AddRtpReceiverWithSsrcs({ 2 }, remote_audio_track_);
AddRtpSenderWithSsrcs({ 1 }, local_video_track_);
AddRtpReceiverWithSsrcs({ 2 }, remote_video_track_);
CreateMap();
// Local audio track <-> RTP audio sender
ASSERT_TRUE(map_->GetVoiceSenderInfos(*local_audio_track_));
EXPECT_EQ(*map_->GetVoiceSenderInfos(*local_audio_track_),
std::vector<cricket::VoiceSenderInfo*>({
&voice_media_info_->senders[0] }));
EXPECT_EQ(map_->GetAudioTrack(voice_media_info_->senders[0]),
local_audio_track_.get());
// Remote audio track <-> RTP audio receiver
EXPECT_EQ(map_->GetVoiceReceiverInfo(*remote_audio_track_),
&voice_media_info_->receivers[0]);
EXPECT_EQ(map_->GetAudioTrack(voice_media_info_->receivers[0]),
remote_audio_track_.get());
// Local video track <-> RTP video sender
ASSERT_TRUE(map_->GetVideoSenderInfos(*local_video_track_));
EXPECT_EQ(*map_->GetVideoSenderInfos(*local_video_track_),
std::vector<cricket::VideoSenderInfo*>({
&video_media_info_->senders[0] }));
EXPECT_EQ(map_->GetVideoTrack(video_media_info_->senders[0]),
local_video_track_.get());
// Remote video track <-> RTP video receiver
EXPECT_EQ(map_->GetVideoReceiverInfo(*remote_video_track_),
&video_media_info_->receivers[0]);
EXPECT_EQ(map_->GetVideoTrack(video_media_info_->receivers[0]),
remote_video_track_.get());
}
TEST_F(TrackMediaInfoMapTest, SingleMultiSsrcSenderPerTrack) {
AddRtpSenderWithSsrcs({ 1, 2 }, local_audio_track_);
AddRtpSenderWithSsrcs({ 3, 4 }, local_video_track_);
CreateMap();
// Local audio track <-> RTP audio senders
ASSERT_TRUE(map_->GetVoiceSenderInfos(*local_audio_track_));
EXPECT_EQ(*map_->GetVoiceSenderInfos(*local_audio_track_),
std::vector<cricket::VoiceSenderInfo*>({
&voice_media_info_->senders[0] }));
EXPECT_EQ(map_->GetAudioTrack(voice_media_info_->senders[0]),
local_audio_track_.get());
// Local video track <-> RTP video senders
ASSERT_TRUE(map_->GetVideoSenderInfos(*local_video_track_));
EXPECT_EQ(*map_->GetVideoSenderInfos(*local_video_track_),
std::vector<cricket::VideoSenderInfo*>({
&video_media_info_->senders[0] }));
EXPECT_EQ(map_->GetVideoTrack(video_media_info_->senders[0]),
local_video_track_.get());
}
TEST_F(TrackMediaInfoMapTest, MultipleOneSsrcSendersPerTrack) {
AddRtpSenderWithSsrcs({ 1 }, local_audio_track_);
AddRtpSenderWithSsrcs({ 2 }, local_audio_track_);
AddRtpSenderWithSsrcs({ 3 }, local_video_track_);
AddRtpSenderWithSsrcs({ 4 }, local_video_track_);
CreateMap();
// Local audio track <-> RTP audio senders
ASSERT_TRUE(map_->GetVoiceSenderInfos(*local_audio_track_));
EXPECT_EQ(*map_->GetVoiceSenderInfos(*local_audio_track_),
std::vector<cricket::VoiceSenderInfo*>({
&voice_media_info_->senders[0],
&voice_media_info_->senders[1] }));
EXPECT_EQ(map_->GetAudioTrack(voice_media_info_->senders[0]),
local_audio_track_.get());
EXPECT_EQ(map_->GetAudioTrack(voice_media_info_->senders[1]),
local_audio_track_.get());
// Local video track <-> RTP video senders
ASSERT_TRUE(map_->GetVideoSenderInfos(*local_video_track_));
EXPECT_EQ(*map_->GetVideoSenderInfos(*local_video_track_),
std::vector<cricket::VideoSenderInfo*>({
&video_media_info_->senders[0],
&video_media_info_->senders[1] }));
EXPECT_EQ(map_->GetVideoTrack(video_media_info_->senders[0]),
local_video_track_.get());
EXPECT_EQ(map_->GetVideoTrack(video_media_info_->senders[1]),
local_video_track_.get());
}
TEST_F(TrackMediaInfoMapTest, MultipleMultiSsrcSendersPerTrack) {
AddRtpSenderWithSsrcs({ 1, 2 }, local_audio_track_);
AddRtpSenderWithSsrcs({ 3, 4 }, local_audio_track_);
AddRtpSenderWithSsrcs({ 5, 6 }, local_video_track_);
AddRtpSenderWithSsrcs({ 7, 8 }, local_video_track_);
CreateMap();
// Local audio track <-> RTP audio senders
ASSERT_TRUE(map_->GetVoiceSenderInfos(*local_audio_track_));
EXPECT_EQ(*map_->GetVoiceSenderInfos(*local_audio_track_),
std::vector<cricket::VoiceSenderInfo*>({
&voice_media_info_->senders[0],
&voice_media_info_->senders[1] }));
EXPECT_EQ(map_->GetAudioTrack(voice_media_info_->senders[0]),
local_audio_track_.get());
EXPECT_EQ(map_->GetAudioTrack(voice_media_info_->senders[1]),
local_audio_track_.get());
// Local video track <-> RTP video senders
ASSERT_TRUE(map_->GetVideoSenderInfos(*local_video_track_));
EXPECT_EQ(*map_->GetVideoSenderInfos(*local_video_track_),
std::vector<cricket::VideoSenderInfo*>({
&video_media_info_->senders[0],
&video_media_info_->senders[1] }));
EXPECT_EQ(map_->GetVideoTrack(video_media_info_->senders[0]),
local_video_track_.get());
EXPECT_EQ(map_->GetVideoTrack(video_media_info_->senders[1]),
local_video_track_.get());
}
// Death tests.
// Disabled on Android because death tests misbehave on Android, see
// base/test/gtest_util.h.
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
class TrackMediaInfoMapDeathTest : public TrackMediaInfoMapTest {
};
TEST_F(TrackMediaInfoMapDeathTest, MultipleOneSsrcReceiversPerTrack) {
AddRtpReceiverWithSsrcs({ 1 }, remote_audio_track_);
AddRtpReceiverWithSsrcs({ 2 }, remote_audio_track_);
AddRtpReceiverWithSsrcs({ 3 }, remote_video_track_);
AddRtpReceiverWithSsrcs({ 4 }, remote_video_track_);
EXPECT_DEATH(CreateMap(), "");
}
TEST_F(TrackMediaInfoMapDeathTest, MultipleMultiSsrcReceiversPerTrack) {
AddRtpReceiverWithSsrcs({ 1, 2 }, remote_audio_track_);
AddRtpReceiverWithSsrcs({ 3, 4 }, remote_audio_track_);
AddRtpReceiverWithSsrcs({ 5, 6 }, remote_video_track_);
AddRtpReceiverWithSsrcs({ 7, 8 }, remote_video_track_);
EXPECT_DEATH(CreateMap(), "");
}
TEST_F(TrackMediaInfoMapDeathTest,
SingleSenderReceiverPerTrackWithSsrcNotUnique) {
AddRtpSenderWithSsrcs({ 1 }, local_audio_track_);
AddRtpReceiverWithSsrcs({ 1 }, remote_audio_track_);
AddRtpSenderWithSsrcs({ 2 }, local_video_track_);
AddRtpReceiverWithSsrcs({ 2 }, remote_video_track_);
EXPECT_DEATH(CreateMap(), "");
}
#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
} // namespace webrtc