From 5adfafdbf66cee66c64b0485f964a0954bc77fa7 Mon Sep 17 00:00:00 2001
From: Steve Anton
Date: Wed, 20 Dec 2017 16:34:00 -0800
Subject: [PATCH] Make ContentInfo/ContentDescription slightly more ergonomic

This makes the following changes:
- Replaces ContentDescription with its only subclass, MediaContentDescription
- Adds helpers to cast a MediaContentDescription to its audio, video, and
  data subclasses.
- Changes ContentInfo.type to a new enum, MediaProtocolType.

Bug: webrtc:8620
Change-Id: I5eb0811cb16a51b0b9d73ecc4fe8edc7037f1aed
Reviewed-on: https://webrtc-review.googlesource.com/35100
Commit-Queue: Steve Anton
Reviewed-by: Peter Thatcher
Cr-Commit-Position: refs/heads/master@{#21401}
---
 p2p/base/p2pconstants.cc              |   3 -
 p2p/base/p2pconstants.h               |   3 -
 pc/jsepsessiondescription_unittest.cc |   7 +-
 pc/mediasession.cc                    |  13 ++-
 pc/mediasession_unittest.cc           |  40 +++++-----
 pc/peerconnection.cc                  |   3 +-
 pc/peerconnection_media_unittest.cc   |   2 +-
 pc/sessiondescription.cc              |  40 ++++++----
 pc/sessiondescription.h               | 111 +++++++++++++++++---------
 pc/webrtcsdp.cc                       |   5 +-
 pc/webrtcsdp_unittest.cc              |  34 ++++----
 11 files changed, 150 insertions(+), 111 deletions(-)

diff --git a/p2p/base/p2pconstants.cc b/p2p/base/p2pconstants.cc
index a793c47bc2..dc6db636e5 100644
--- a/p2p/base/p2pconstants.cc
+++ b/p2p/base/p2pconstants.cc
@@ -37,9 +37,6 @@ const int ICE_CANDIDATE_COMPONENT_RTP = 1;
 const int ICE_CANDIDATE_COMPONENT_RTCP = 2;
 const int ICE_CANDIDATE_COMPONENT_DEFAULT = 1;

-const char NS_JINGLE_RTP[] = "urn:xmpp:jingle:apps:rtp:1";
-const char NS_JINGLE_DRAFT_SCTP[] = "google:jingle:sctp";
-
 // From RFC 4145, SDP setup attribute values.
 const char CONNECTIONROLE_ACTIVE_STR[] = "active";
 const char CONNECTIONROLE_PASSIVE_STR[] = "passive";
diff --git a/p2p/base/p2pconstants.h b/p2p/base/p2pconstants.h
index b72d025774..584eac2fd3 100644
--- a/p2p/base/p2pconstants.h
+++ b/p2p/base/p2pconstants.h
@@ -39,9 +39,6 @@ extern const int ICE_CANDIDATE_COMPONENT_RTP;
 extern const int ICE_CANDIDATE_COMPONENT_RTCP;
 extern const int ICE_CANDIDATE_COMPONENT_DEFAULT;

-extern const char NS_JINGLE_RTP[];
-extern const char NS_JINGLE_DRAFT_SCTP[];
-
 // RFC 4145, SDP setup attribute values.
 extern const char CONNECTIONROLE_ACTIVE_STR[];
 extern const char CONNECTIONROLE_PASSIVE_STR[];
diff --git a/pc/jsepsessiondescription_unittest.cc b/pc/jsepsessiondescription_unittest.cc
index 1835f29094..0cddd61cc8 100644
--- a/pc/jsepsessiondescription_unittest.cc
+++ b/pc/jsepsessiondescription_unittest.cc
@@ -23,6 +23,7 @@
 #include "rtc_base/ptr_util.h"
 #include "rtc_base/stringencode.h"

+using cricket::MediaProtocolType;
 using ::testing::Values;
 using webrtc::IceCandidateCollection;
 using webrtc::IceCandidateInterface;
@@ -54,12 +55,10 @@ static cricket::SessionDescription* CreateCricketSessionDescription() {
       new cricket::VideoContentDescription());

   audio->AddCodec(cricket::AudioCodec(103, "ISAC", 16000, 0, 0));
-  desc->AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
-                   audio.release());
+  desc->AddContent(cricket::CN_AUDIO, MediaProtocolType::kRtp, audio.release());

   video->AddCodec(cricket::VideoCodec(120, "VP8"));
-  desc->AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
-                   video.release());
+  desc->AddContent(cricket::CN_VIDEO, MediaProtocolType::kRtp, video.release());

   EXPECT_TRUE(desc->AddTransportInfo(cricket::TransportInfo(
       cricket::CN_AUDIO,
diff --git a/pc/mediasession.cc b/pc/mediasession.cc
index e1d3e3c847..e1e4fedb56 100644
--- a/pc/mediasession.cc
+++ b/pc/mediasession.cc
@@ -1870,7 +1870,7 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer(

   audio->set_direction(media_description_options.direction);

-  desc->AddContent(media_description_options.mid, NS_JINGLE_RTP,
+  desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp,
                    media_description_options.stopped, audio.release());
   if (!AddTransportOffer(media_description_options.mid,
                          media_description_options.transport_options,
@@ -1940,7 +1940,7 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer(

   video->set_direction(media_description_options.direction);

-  desc->AddContent(media_description_options.mid, NS_JINGLE_RTP,
+  desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp,
                    media_description_options.stopped, video.release());
   if (!AddTransportOffer(media_description_options.mid,
                          media_description_options.transport_options,
@@ -2002,12 +2002,12 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer(
   }

   if (is_sctp) {
-    desc->AddContent(media_description_options.mid, NS_JINGLE_DRAFT_SCTP,
+    desc->AddContent(media_description_options.mid, MediaProtocolType::kSctp,
                      data.release());
   } else {
     data->set_bandwidth(kDataMaxBandwidth);
     SetMediaProtocol(secure_transport, data.get());
-    desc->AddContent(media_description_options.mid, NS_JINGLE_RTP,
+    desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp,
                      media_description_options.stopped, data.release());
   }
   if (!AddTransportOffer(media_description_options.mid,
@@ -2301,9 +2301,8 @@ void MediaSessionDescriptionFactory::ComputeAudioCodecsIntersectionAndUnion() {
 }

 bool IsMediaContent(const ContentInfo* content) {
-  return (content &&
-          (content->type == NS_JINGLE_RTP ||
-           content->type == NS_JINGLE_DRAFT_SCTP));
+  return (content && (content->type == MediaProtocolType::kRtp ||
+                      content->type == MediaProtocolType::kSctp));
 }

 bool IsAudioContent(const ContentInfo* content) {
diff --git a/pc/mediasession_unittest.cc b/pc/mediasession_unittest.cc
index b62373007c..d83d87d821 100644
--- a/pc/mediasession_unittest.cc
+++ b/pc/mediasession_unittest.cc
@@ -37,6 +37,7 @@ using cricket::MediaSessionDescriptionFactory;
 using cricket::MediaDescriptionOptions;
 using cricket::MediaSessionOptions;
 using cricket::MediaType;
+using cricket::MediaProtocolType;
 using cricket::SessionDescription;
 using cricket::SsrcGroup;
 using cricket::StreamParams;
@@ -59,7 +60,6 @@ using cricket::kAutoBandwidth;
 using cricket::AudioCodec;
 using cricket::VideoCodec;
 using cricket::DataCodec;
-using cricket::NS_JINGLE_RTP;
 using cricket::MEDIA_TYPE_AUDIO;
 using cricket::MEDIA_TYPE_VIDEO;
 using cricket::MEDIA_TYPE_DATA;
@@ -630,8 +630,8 @@ class MediaSessionDescriptionFactoryTest : public testing::Test {
     const ContentInfo* vc = answer->GetContentByName("video");
     ASSERT_TRUE(ac != NULL);
     ASSERT_TRUE(vc != NULL);
-    EXPECT_EQ(std::string(NS_JINGLE_RTP), ac->type);
-    EXPECT_EQ(std::string(NS_JINGLE_RTP), vc->type);
+    EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+    EXPECT_EQ(MediaProtocolType::kRtp, vc->type);
     const AudioContentDescription* acd =
         static_cast<const AudioContentDescription*>(ac->description);
     const VideoContentDescription* vcd =
@@ -675,7 +675,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioOffer) {
   const ContentInfo* vc = offer->GetContentByName("video");
   ASSERT_TRUE(ac != NULL);
   ASSERT_TRUE(vc == NULL);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), ac->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
   const AudioContentDescription* acd =
       static_cast<const AudioContentDescription*>(ac->description);
   EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
@@ -698,8 +698,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoOffer) {
   const ContentInfo* vc = offer->GetContentByName("video");
   ASSERT_TRUE(ac != NULL);
   ASSERT_TRUE(vc != NULL);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), ac->type);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), vc->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, vc->type);
   const AudioContentDescription* acd =
       static_cast<const AudioContentDescription*>(ac->description);
   const VideoContentDescription* vcd =
@@ -807,8 +807,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateRtpDataOffer) {
   const ContentInfo* dc = offer->GetContentByName("data");
   ASSERT_TRUE(ac != NULL);
   ASSERT_TRUE(dc != NULL);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), ac->type);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), dc->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, dc->type);
   const AudioContentDescription* acd =
       static_cast<const AudioContentDescription*>(ac->description);
   const DataContentDescription* dcd =
@@ -952,7 +952,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswer) {
   const ContentInfo* vc = answer->GetContentByName("video");
   ASSERT_TRUE(ac != NULL);
   ASSERT_TRUE(vc == NULL);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), ac->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
   const AudioContentDescription* acd =
       static_cast<const AudioContentDescription*>(ac->description);
   EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
@@ -979,7 +979,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswerGcm) {
   const ContentInfo* vc = answer->GetContentByName("video");
   ASSERT_TRUE(ac != NULL);
   ASSERT_TRUE(vc == NULL);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), ac->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
   const AudioContentDescription* acd =
       static_cast<const AudioContentDescription*>(ac->description);
   EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
@@ -1005,8 +1005,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswer) {
   const ContentInfo* vc = answer->GetContentByName("video");
   ASSERT_TRUE(ac != NULL);
   ASSERT_TRUE(vc != NULL);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), ac->type);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), vc->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, vc->type);
   const AudioContentDescription* acd =
       static_cast<const AudioContentDescription*>(ac->description);
   const VideoContentDescription* vcd =
@@ -1056,8 +1056,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswer) {
   const ContentInfo* dc = answer->GetContentByName("data");
   ASSERT_TRUE(ac != NULL);
   ASSERT_TRUE(dc != NULL);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), ac->type);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), dc->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, dc->type);
   const AudioContentDescription* acd =
       static_cast<const AudioContentDescription*>(ac->description);
   const DataContentDescription* dcd =
@@ -1090,8 +1090,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerGcm) {
   const ContentInfo* dc = answer->GetContentByName("data");
   ASSERT_TRUE(ac != NULL);
   ASSERT_TRUE(dc != NULL);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), ac->type);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), dc->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, dc->type);
   const AudioContentDescription* acd =
       static_cast<const AudioContentDescription*>(ac->description);
   const DataContentDescription* dcd =
@@ -2614,11 +2614,11 @@ TEST(MediaSessionDescription, CopySessionDescription) {
   AudioContentDescription* acd(new AudioContentDescription());
   acd->set_codecs(MAKE_VECTOR(kAudioCodecs1));
   acd->AddLegacyStream(1);
-  source.AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP, acd);
+  source.AddContent(cricket::CN_AUDIO, MediaProtocolType::kRtp, acd);
   VideoContentDescription* vcd(new VideoContentDescription());
   vcd->set_codecs(MAKE_VECTOR(kVideoCodecs1));
   vcd->AddLegacyStream(2);
-  source.AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP, vcd);
+  source.AddContent(cricket::CN_VIDEO, MediaProtocolType::kRtp, vcd);

   std::unique_ptr<SessionDescription> copy(source.Copy());
   ASSERT_TRUE(copy.get() != NULL);
@@ -2627,13 +2627,13 @@
   const ContentInfo* vc = copy->GetContentByName("video");
   ASSERT_TRUE(ac != NULL);
   ASSERT_TRUE(vc != NULL);
-  EXPECT_EQ(std::string(NS_JINGLE_RTP), ac->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
   const AudioContentDescription* acd_copy =
       static_cast<const AudioContentDescription*>(ac->description);
   EXPECT_EQ(acd->codecs(), acd_copy->codecs());
   EXPECT_EQ(1u, acd->first_ssrc());

-  EXPECT_EQ(std::string(NS_JINGLE_RTP), vc->type);
+  EXPECT_EQ(MediaProtocolType::kRtp, vc->type);
   const VideoContentDescription* vcd_copy =
       static_cast<const VideoContentDescription*>(vc->description);
   EXPECT_EQ(vcd->codecs(), vcd_copy->codecs());
diff --git a/pc/peerconnection.cc b/pc/peerconnection.cc
index a1793fb10b..c9e25909e2 100644
--- a/pc/peerconnection.cc
+++ b/pc/peerconnection.cc
@@ -54,6 +54,7 @@ using cricket::ContentInfo;
 using cricket::ContentInfos;
 using cricket::MediaContentDescription;
 using cricket::SessionDescription;
+using cricket::MediaProtocolType;
 using cricket::TransportInfo;

 using cricket::LOCAL_PORT_TYPE;
@@ -4668,7 +4669,7 @@ bool PeerConnection::ValidateBundleSettings(const SessionDescription* desc) {
     const cricket::ContentInfo* content = (&*citer);
     RTC_DCHECK(content != NULL);
     if (bundle_group->HasContentName(content->name) && !content->rejected &&
-        content->type == cricket::NS_JINGLE_RTP) {
+        content->type == MediaProtocolType::kRtp) {
       if (!HasRtcpMuxEnabled(content))
         return false;
     }
diff --git a/pc/peerconnection_media_unittest.cc b/pc/peerconnection_media_unittest.cc
index a5a0b42f02..e647fb201e 100644
--- a/pc/peerconnection_media_unittest.cc
+++ b/pc/peerconnection_media_unittest.cc
@@ -632,7 +632,7 @@ void ReverseMediaContent(cricket::SessionDescription* desc) {

 void ChangeMediaTypeAudioToVideo(cricket::SessionDescription* desc) {
   desc->RemoveContentByName(cricket::CN_AUDIO);
   auto* video_content = desc->GetContentByName(cricket::CN_VIDEO);
-  desc->AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
+  desc->AddContent(cricket::CN_AUDIO, video_content->type,
                    video_content->description->Copy());
 }
diff --git a/pc/sessiondescription.cc b/pc/sessiondescription.cc
index 6e90ac82f0..355d6436be 100644
--- a/pc/sessiondescription.cc
+++ b/pc/sessiondescription.cc
@@ -10,6 +10,8 @@

 #include "pc/sessiondescription.h"

+#include <utility>
+
 namespace cricket {

 namespace {
@@ -38,14 +40,13 @@ const ContentInfo* FindContentInfoByName(const ContentInfos& contents,
 }

 const ContentInfo* FindContentInfoByType(const ContentInfos& contents,
-                                         const std::string& type) {
-  for (ContentInfos::const_iterator content = contents.begin();
-       content != contents.end(); ++content) {
-    if (content->type == type) {
-      return &(*content);
+                                         MediaProtocolType type) {
+  for (const auto& content : contents) {
+    if (content.type == type) {
+      return &content;
     }
   }
-  return NULL;
+  return nullptr;
 }

 ContentGroup::ContentGroup(const std::string& semantics)
@@ -147,7 +148,7 @@ ContentDescription* SessionDescription::GetContentDescriptionByName(
 }

 const ContentInfo* SessionDescription::FirstContentByType(
-    const std::string& type) const {
+    MediaProtocolType type) const {
   return FindContentInfoByType(contents_, type);
 }

@@ -156,25 +157,36 @@ const ContentInfo* SessionDescription::FirstContent() const {
 }

 void SessionDescription::AddContent(const std::string& name,
-                                    const std::string& type,
+                                    MediaProtocolType type,
                                     ContentDescription* description) {
-  contents_.push_back(ContentInfo(name, type, description));
+  ContentInfo content(type);
+  content.name = name;
+  content.description = description;
+  contents_.push_back(std::move(content));
 }

 void SessionDescription::AddContent(const std::string& name,
-                                    const std::string& type,
+                                    MediaProtocolType type,
                                     bool rejected,
                                     ContentDescription* description) {
-  contents_.push_back(ContentInfo(name, type, rejected, description));
+  ContentInfo content(type);
+  content.name = name;
+  content.rejected = rejected;
+  content.description = description;
+  contents_.push_back(std::move(content));
 }

 void SessionDescription::AddContent(const std::string& name,
-                                    const std::string& type,
+                                    MediaProtocolType type,
                                     bool rejected,
                                     bool bundle_only,
                                     ContentDescription* description) {
-  contents_.push_back(
-      ContentInfo(name, type, rejected, bundle_only, description));
+  ContentInfo content(type);
+  content.name = name;
+  content.rejected = rejected;
+  content.bundle_only = bundle_only;
+  content.description = description;
+  contents_.push_back(std::move(content));
 }

 bool SessionDescription::RemoveContentByName(const std::string& name) {
diff --git a/pc/sessiondescription.h b/pc/sessiondescription.h
index 648b27dc1c..b45a807e34 100644
--- a/pc/sessiondescription.h
+++ b/pc/sessiondescription.h
@@ -48,23 +48,38 @@ extern const char kMediaProtocolTcpDtlsSctp[];
 // Options to control how session descriptions are generated.
 const int kAutoBandwidth = -1;

-// Describes a session content. Individual content types inherit from
-// this class.  Analagous to a <jingle><content><description> or
-// <session><description>.
-class ContentDescription {
- public:
-  virtual ~ContentDescription() {}
-  virtual ContentDescription* Copy() const = 0;
-};
+class AudioContentDescription;
+class VideoContentDescription;
+class DataContentDescription;

-// "content" (as used in XEP-0166) descriptions for voice and video.
-class MediaContentDescription : public ContentDescription {
+// Describes a session description media section. There are subclasses for each
+// media type (audio, video, data) that will have additional information.
+class MediaContentDescription {
  public:
-  MediaContentDescription() {}
+  MediaContentDescription() = default;
+  virtual ~MediaContentDescription() = default;

   virtual MediaType type() const = 0;
+
+  // Try to cast this media description to an AudioContentDescription. Returns
+  // nullptr if the cast fails.
+  virtual AudioContentDescription* as_audio() { return nullptr; }
+  virtual const AudioContentDescription* as_audio() const { return nullptr; }
+
+  // Try to cast this media description to a VideoContentDescription. Returns
+  // nullptr if the cast fails.
+  virtual VideoContentDescription* as_video() { return nullptr; }
+  virtual const VideoContentDescription* as_video() const { return nullptr; }
+
+  // Try to cast this media description to a DataContentDescription. Returns
+  // nullptr if the cast fails.
+  virtual DataContentDescription* as_data() { return nullptr; }
+  virtual const DataContentDescription* as_data() const { return nullptr; }
+
   virtual bool has_codecs() const = 0;

+  virtual MediaContentDescription* Copy() const = 0;
+
   // |protocol| is the expected media transport protocol, such as RTP/AVPF,
   // RTP/SAVPF or SCTP/DTLS.
   std::string protocol() const { return protocol_; }
@@ -183,6 +198,10 @@ class MediaContentDescription : public ContentDescription {
   rtc::SocketAddress connection_address_;
 };

+// TODO(bugs.webrtc.org/8620): Remove this alias once downstream projects have
+// updated.
+using ContentDescription = MediaContentDescription;
+
 template <class C>
 class MediaContentDescriptionImpl : public MediaContentDescription {
  public:
@@ -233,6 +252,8 @@ class AudioContentDescription : public MediaContentDescriptionImpl<AudioCodec> {
     return new AudioContentDescription(*this);
   }
   virtual MediaType type() const { return MEDIA_TYPE_AUDIO; }
+  virtual AudioContentDescription* as_audio() { return this; }
+  virtual const AudioContentDescription* as_audio() const { return this; }
 };

 class VideoContentDescription : public MediaContentDescriptionImpl<VideoCodec> {
@@ -241,6 +262,8 @@
     return new VideoContentDescription(*this);
   }
   virtual MediaType type() const { return MEDIA_TYPE_VIDEO; }
+  virtual VideoContentDescription* as_video() { return this; }
+  virtual const VideoContentDescription* as_video() const { return this; }
 };

 class DataContentDescription : public MediaContentDescriptionImpl<DataCodec> {
@@ -251,6 +274,8 @@
     return new DataContentDescription(*this);
   }
   virtual MediaType type() const { return MEDIA_TYPE_DATA; }
+  virtual DataContentDescription* as_data() { return this; }
+  virtual const DataContentDescription* as_data() const { return this; }

   bool use_sctpmap() const { return use_sctpmap_; }
   void set_use_sctpmap(bool enable) { use_sctpmap_ = enable; }
@@ -259,32 +284,40 @@
   bool use_sctpmap_ = true;
 };

-// Analagous to a <jingle><content> or <session><content>.
-// name = name of <content name="...">
-// type = xmlns of <content>
+// Protocol used for encoding media. This is the "top level" protocol that may
+// be wrapped by zero or many transport protocols (UDP, ICE, etc.).
+enum class MediaProtocolType {
+  kRtp,   // Section will use the RTP protocol (e.g., for audio or video).
+          // https://tools.ietf.org/html/rfc3550
+  kSctp   // Section will use the SCTP protocol (e.g., for a data channel).
+          // https://tools.ietf.org/html/rfc4960
+};
+
+// TODO(bugs.webrtc.org/8620): Remove once downstream projects have updated.
+constexpr MediaProtocolType NS_JINGLE_RTP = MediaProtocolType::kRtp;
+constexpr MediaProtocolType NS_JINGLE_DRAFT_SCTP = MediaProtocolType::kSctp;
+
+// Represents a session description section. Most information about the section
+// is stored in the description, which is a subclass of MediaContentDescription.
 struct ContentInfo {
-  ContentInfo() {}
-  ContentInfo(const std::string& name,
-              const std::string& type,
-              ContentDescription* description)
-      : name(name), type(type), description(description) {}
-  ContentInfo(const std::string& name,
-              const std::string& type,
-              bool rejected,
-              ContentDescription* description)
-      : name(name), type(type), rejected(rejected), description(description) {}
-  ContentInfo(const std::string& name,
-              const std::string& type,
-              bool rejected,
-              bool bundle_only,
-              ContentDescription* description)
-      : name(name),
-        type(type),
-        rejected(rejected),
-        bundle_only(bundle_only),
-        description(description) {}
+  explicit ContentInfo(MediaProtocolType type) : type(type) {}
+
+  // Alias for |name|.
+  std::string mid() const { return name; }
+  void set_mid(const std::string& mid) { this->name = mid; }
+
+  // Alias for |description|.
+  MediaContentDescription* media_description() { return description; }
+  const MediaContentDescription* media_description() const {
+    return description;
+  }
+  void set_media_description(MediaContentDescription* description) {
+    this->description = description;
+  }
+
+  // TODO(bugs.webrtc.org/8620): Rename this to mid.
   std::string name;
-  std::string type;
+  MediaProtocolType type;
   bool rejected = false;
   bool bundle_only = false;
   ContentDescription* description = nullptr;
@@ -349,20 +382,20 @@ class SessionDescription {
   const ContentDescription* GetContentDescriptionByName(
       const std::string& name) const;
   ContentDescription* GetContentDescriptionByName(const std::string& name);
-  const ContentInfo* FirstContentByType(const std::string& type) const;
+  const ContentInfo* FirstContentByType(MediaProtocolType type) const;
   const ContentInfo* FirstContent() const;

   // Content mutators.
   // Adds a content to this description. Takes ownership of ContentDescription*.
   void AddContent(const std::string& name,
-                  const std::string& type,
+                  MediaProtocolType type,
                   ContentDescription* description);
   void AddContent(const std::string& name,
-                  const std::string& type,
+                  MediaProtocolType type,
                   bool rejected,
                   ContentDescription* description);
   void AddContent(const std::string& name,
-                  const std::string& type,
+                  MediaProtocolType type,
                   bool rejected,
                   bool bundle_only,
                   ContentDescription* description);
diff --git a/pc/webrtcsdp.cc b/pc/webrtcsdp.cc
index 63265cdf26..633e228983 100644
--- a/pc/webrtcsdp.cc
+++ b/pc/webrtcsdp.cc
@@ -69,6 +69,7 @@ using cricket::kCodecParamAssociatedPayloadType;
 using cricket::MediaContentDescription;
 using cricket::MediaType;
 using cricket::RtpHeaderExtensions;
+using cricket::MediaProtocolType;
 using cricket::SsrcGroup;
 using cricket::StreamParams;
 using cricket::StreamParamsVec;
@@ -2500,8 +2501,8 @@ bool ParseMediaDescription(const std::string& message,
     content->set_connection_address(address);

     desc->AddContent(content_name,
-                     IsDtlsSctp(protocol) ? cricket::NS_JINGLE_DRAFT_SCTP
-                                          : cricket::NS_JINGLE_RTP,
+                     IsDtlsSctp(protocol) ? MediaProtocolType::kSctp
+                                          : MediaProtocolType::kRtp,
                      content_rejected, bundle_only, content.release());
     // Create TransportInfo with the media level "ice-pwd" and "ice-ufrag".
     TransportInfo transport_info(content_name, transport);
diff --git a/pc/webrtcsdp_unittest.cc b/pc/webrtcsdp_unittest.cc
index a454c604b0..83ec4788f2 100644
--- a/pc/webrtcsdp_unittest.cc
+++ b/pc/webrtcsdp_unittest.cc
@@ -42,10 +42,9 @@ using cricket::ICE_CANDIDATE_COMPONENT_RTCP;
 using cricket::ICE_CANDIDATE_COMPONENT_RTP;
 using cricket::kFecSsrcGroupSemantics;
 using cricket::LOCAL_PORT_TYPE;
-using cricket::NS_JINGLE_DRAFT_SCTP;
-using cricket::NS_JINGLE_RTP;
 using cricket::RELAY_PORT_TYPE;
 using cricket::SessionDescription;
+using cricket::MediaProtocolType;
 using cricket::StreamParams;
 using cricket::STUN_PORT_TYPE;
 using cricket::TransportDescription;
@@ -880,7 +879,7 @@ class WebRtcSdpTest : public testing::Test {
     audio_desc_->AddStream(audio_stream);
     rtc::SocketAddress audio_addr("74.125.127.126", 2345);
     audio_desc_->set_connection_address(audio_addr);
-    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_desc_);
+    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp, audio_desc_);

     // VideoContentDescription
     video_desc_ = CreateVideoContentDescription();
@@ -895,7 +894,7 @@ class WebRtcSdpTest : public testing::Test {
     video_desc_->AddStream(video_stream);
     rtc::SocketAddress video_addr("74.125.224.39", 3457);
     video_desc_->set_connection_address(video_addr);
-    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_desc_);
+    desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp, video_desc_);

     // TransportInfo
     EXPECT_TRUE(desc_.AddTransportInfo(TransportInfo(
@@ -1076,8 +1075,8 @@ class WebRtcSdpTest : public testing::Test {

     desc_.RemoveContentByName(kAudioContentName);
     desc_.RemoveContentByName(kVideoContentName);
-    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_desc_);
-    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_desc_);
+    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp, audio_desc_);
+    desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp, video_desc_);

     ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(), jdesc_.session_id(),
                                   jdesc_.session_version()));
@@ -1094,7 +1093,7 @@ class WebRtcSdpTest : public testing::Test {
     audio_track_2.sync_label = kStreamLabel2;
     audio_track_2.ssrcs.push_back(kAudioTrack2Ssrc);
     audio_desc_2->AddStream(audio_track_2);
-    desc_.AddContent(kAudioContentName2, NS_JINGLE_RTP, audio_desc_2);
+    desc_.AddContent(kAudioContentName2, MediaProtocolType::kRtp, audio_desc_2);
     EXPECT_TRUE(desc_.AddTransportInfo(TransportInfo(
         kAudioContentName2, TransportDescription(kUfragVoice2, kPwdVoice2))));
     // Video track 2, in stream 2.
@@ -1105,7 +1104,7 @@ class WebRtcSdpTest : public testing::Test {
     video_track_2.sync_label = kStreamLabel2;
     video_track_2.ssrcs.push_back(kVideoTrack2Ssrc);
     video_desc_2->AddStream(video_track_2);
-    desc_.AddContent(kVideoContentName2, NS_JINGLE_RTP, video_desc_2);
+    desc_.AddContent(kVideoContentName2, MediaProtocolType::kRtp, video_desc_2);
     EXPECT_TRUE(desc_.AddTransportInfo(TransportInfo(
         kVideoContentName2, TransportDescription(kUfragVideo2, kPwdVideo2))));

@@ -1117,7 +1116,7 @@ class WebRtcSdpTest : public testing::Test {
     video_track_3.sync_label = kStreamLabel2;
     video_track_3.ssrcs.push_back(kVideoTrack3Ssrc);
     video_desc_3->AddStream(video_track_3);
-    desc_.AddContent(kVideoContentName3, NS_JINGLE_RTP, video_desc_3);
+    desc_.AddContent(kVideoContentName3, MediaProtocolType::kRtp, video_desc_3);
     EXPECT_TRUE(desc_.AddTransportInfo(TransportInfo(
         kVideoContentName3, TransportDescription(kUfragVideo3, kPwdVideo3))));

@@ -1449,8 +1448,8 @@ class WebRtcSdpTest : public testing::Test {
         RtpExtension(kExtmapUri, kExtmapId, encrypted));
     desc_.RemoveContentByName(kAudioContentName);
     desc_.RemoveContentByName(kVideoContentName);
-    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_desc_);
-    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_desc_);
+    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp, audio_desc_);
+    desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp, video_desc_);
   }

   void RemoveCryptos() {
@@ -1482,9 +1481,9 @@ class WebRtcSdpTest : public testing::Test {

     desc_.RemoveContentByName(kAudioContentName);
     desc_.RemoveContentByName(kVideoContentName);
-    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_rejected,
+    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp, audio_rejected,
                      audio_desc_);
-    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_rejected,
+    desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp, video_rejected,
                      video_desc_);
     SetIceUfragPwd(kAudioContentName, audio_rejected ? "" : kUfragVoice,
                    audio_rejected ? "" : kPwdVoice);
@@ -1510,7 +1509,8 @@ class WebRtcSdpTest : public testing::Test {
                      cricket::kGoogleSctpDataCodecName);
     codec.SetParam(cricket::kCodecParamPort, kDefaultSctpPort);
     data_desc_->AddCodec(codec);
-    desc_.AddContent(kDataContentName, NS_JINGLE_DRAFT_SCTP, data.release());
+    desc_.AddContent(kDataContentName, MediaProtocolType::kSctp,
+                     data.release());
     EXPECT_TRUE(desc_.AddTransportInfo(TransportInfo(
         kDataContentName, TransportDescription(kUfragData, kPwdData))));
   }
@@ -1530,7 +1530,7 @@ class WebRtcSdpTest : public testing::Test {
         1, "AES_CM_128_HMAC_SHA1_80",
         "inline:FvLcvU2P3ZWmQxgPAgcDu7Zl9vftYElFOjEzhWs5", ""));
     data_desc_->set_protocol(cricket::kMediaProtocolSavpf);
-    desc_.AddContent(kDataContentName, NS_JINGLE_RTP, data.release());
+    desc_.AddContent(kDataContentName, MediaProtocolType::kRtp, data.release());
     EXPECT_TRUE(desc_.AddTransportInfo(TransportInfo(
         kDataContentName, TransportDescription(kUfragData, kPwdData))));
   }
@@ -1565,9 +1565,9 @@ class WebRtcSdpTest : public testing::Test {
                                        video_desc_->Copy());
     desc_.RemoveContentByName(kAudioContentName);
     desc_.RemoveContentByName(kVideoContentName);
-    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_rejected,
+    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp, audio_rejected,
                      audio_desc_);
-    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_rejected,
+    desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp, video_rejected,
                      video_desc_);
     SetIceUfragPwd(kAudioContentName, audio_rejected ? "" : kUfragVoice,
                    audio_rejected ? "" : kPwdVoice);