Refactoring RtpSenderInternal to share implementation for Audio & Video.

Most of the implementation in rtp_sender.cc was copy-pasted between the
audio and video RTP senders. This change moves the common behavior into
a shared base class, RtpSenderBase, which implements the
RtpSenderInternal interface. The template method pattern is used to
accommodate the small differences between the audio and video senders.

Bug: None
Change-Id: I6d4e93cd32fbb0fb361fd0e1883791019bde9a92
Reviewed-on: https://webrtc-review.googlesource.com/c/123411
Reviewed-by: Steve Anton <steveanton@webrtc.org>
Reviewed-by: Seth Hampson <shampson@webrtc.org>
Commit-Queue: Amit Hilbuch <amithi@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26758}
Author: Amit Hilbuch
Authored: 2019-02-19 15:20:21 -08:00
Committed by: Commit Bot
Parent: ba63cafe27
Commit: ea7ef2ad1d
6 changed files with 523 additions and 671 deletions
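To make the pattern concrete, here is a minimal, self-contained C++ sketch of the template-method split described above. The class and method names are illustrative only and are not the WebRTC classes; the real ones (RtpSenderBase, AudioRtpSender, VideoRtpSender) appear in the diff below.

#include <iostream>
#include <string>

// Base class owns the shared control flow and defers the media-specific
// steps to protected virtual hooks (the template method pattern).
class SenderBase {
 public:
  virtual ~SenderBase() = default;

  // Shared flow, loosely mirroring RtpSenderBase::SetTrack() in this change.
  bool SetTrack(const std::string& new_track_kind) {
    if (new_track_kind != track_kind()) {
      std::cout << "wrong track kind for this sender\n";
      return false;
    }
    DetachTrack();  // media-specific: audio removes its sink, video does nothing
    AttachTrack();  // media-specific: cache enabled state / content hint
    SetSend();      // media-specific: SetAudioSend() vs. SetVideoSend()
    return true;
  }

 protected:
  virtual std::string track_kind() const = 0;
  virtual void SetSend() = 0;
  // Optional hooks with empty defaults, as in RtpSenderBase.
  virtual void AttachTrack() {}
  virtual void DetachTrack() {}
};

class AudioSender : public SenderBase {
 protected:
  std::string track_kind() const override { return "audio"; }
  void SetSend() override { std::cout << "configuring audio send\n"; }
  void AttachTrack() override { std::cout << "adding local audio sink\n"; }
};

class VideoSender : public SenderBase {
 protected:
  std::string track_kind() const override { return "video"; }
  void SetSend() override { std::cout << "configuring video send\n"; }
};

int main() {
  AudioSender audio;
  VideoSender video;
  audio.SetTrack("audio");  // shared flow with audio-specific hooks
  video.SetTrack("video");  // shared flow with video-specific hooks
  return 0;
}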


@ -109,6 +109,20 @@ DataMediaChannel::DataMediaChannel(const MediaConfig& config)
: MediaChannel(config) {}
DataMediaChannel::~DataMediaChannel() = default;
webrtc::RtpParameters DataMediaChannel::GetRtpSendParameters(
uint32_t ssrc) const {
// GetRtpSendParameters is not supported for DataMediaChannel.
RTC_NOTREACHED();
return webrtc::RtpParameters();
}
webrtc::RTCError DataMediaChannel::SetRtpSendParameters(
uint32_t ssrc,
const webrtc::RtpParameters& parameters) {
// SetRtpSendParameters is not supported for DataMediaChannel.
RTC_NOTREACHED();
return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION);
}
cricket::MediaType DataMediaChannel::media_type() const {
return cricket::MediaType::MEDIA_TYPE_DATA;
}


@ -273,6 +273,11 @@ class MediaChannel : public sigslot::has_slots<> {
}
bool ExtmapAllowMixed() const { return extmap_allow_mixed_; }
virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0;
virtual webrtc::RTCError SetRtpSendParameters(
uint32_t ssrc,
const webrtc::RtpParameters& parameters) = 0;
protected:
virtual rtc::DiffServCodePoint PreferredDscp() const;
@ -712,10 +717,6 @@ class VoiceMediaChannel : public MediaChannel {
cricket::MediaType media_type() const override;
virtual bool SetSendParameters(const AudioSendParameters& params) = 0;
virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0;
virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0;
virtual webrtc::RTCError SetRtpSendParameters(
uint32_t ssrc,
const webrtc::RtpParameters& parameters) = 0;
// Get the receive parameters for the incoming stream identified by |ssrc|.
// If |ssrc| is 0, retrieve the receive parameters for the default receive
// stream, which is used when SSRCs are not signaled. Note that calling with
@ -792,10 +793,6 @@ class VideoMediaChannel : public MediaChannel {
cricket::MediaType media_type() const override;
virtual bool SetSendParameters(const VideoSendParameters& params) = 0;
virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0;
virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0;
virtual webrtc::RTCError SetRtpSendParameters(
uint32_t ssrc,
const webrtc::RtpParameters& parameters) = 0;
// Get the receive parameters for the incoming stream identified by |ssrc|.
// If |ssrc| is 0, retrieve the receive parameters for the default receive
// stream, which is used when SSRCs are not signaled. Note that calling with
@ -905,6 +902,12 @@ class DataMediaChannel : public MediaChannel {
virtual bool SetSendParameters(const DataSendParameters& params) = 0;
virtual bool SetRecvParameters(const DataRecvParameters& params) = 0;
// RtpParameter methods are not supported for Data channel.
webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override;
webrtc::RTCError SetRtpSendParameters(
uint32_t ssrc,
const webrtc::RtpParameters& parameters) override;
// TODO(pthatcher): Implement this.
virtual bool GetStats(DataMediaInfo* info);


@ -1560,14 +1560,14 @@ PeerConnection::CreateSender(
(track->kind() == MediaStreamTrackInterface::kAudioKind));
sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(),
new AudioRtpSender(worker_thread(), id, stats_.get()));
AudioRtpSender::Create(worker_thread(), id, stats_.get()));
NoteUsageEvent(UsageEvent::AUDIO_ADDED);
} else {
RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO);
RTC_DCHECK(!track ||
(track->kind() == MediaStreamTrackInterface::kVideoKind));
sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(), new VideoRtpSender(worker_thread(), id));
signaling_thread(), VideoRtpSender::Create(worker_thread(), id));
NoteUsageEvent(UsageEvent::VIDEO_ADDED);
}
bool set_track_succeeded = sender->SetTrack(track);
@ -1647,15 +1647,15 @@ rtc::scoped_refptr<RtpSenderInterface> PeerConnection::CreateSender(
// TODO(steveanton): Move construction of the RtpSenders to RtpTransceiver.
rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> new_sender;
if (kind == MediaStreamTrackInterface::kAudioKind) {
auto* audio_sender = new AudioRtpSender(
auto audio_sender = AudioRtpSender::Create(
worker_thread(), rtc::CreateRandomUuid(), stats_.get());
audio_sender->SetMediaChannel(voice_media_channel());
new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(), audio_sender);
GetAudioTransceiver()->internal()->AddSender(new_sender);
} else if (kind == MediaStreamTrackInterface::kVideoKind) {
auto* video_sender =
new VideoRtpSender(worker_thread(), rtc::CreateRandomUuid());
auto video_sender =
VideoRtpSender::Create(worker_thread(), rtc::CreateRandomUuid());
video_sender->SetMediaChannel(video_media_channel());
new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(), video_sender);


@ -66,22 +66,6 @@ bool PerSenderRtpEncodingParameterHasValue(
return false;
}
// Attempt to attach the frame decryptor to the current media channel on the
// correct worker thread only if both the media channel exists and a ssrc has
// been allocated to the stream.
void MaybeAttachFrameEncryptorToMediaChannel(
const uint32_t ssrc,
rtc::Thread* worker_thread,
rtc::scoped_refptr<webrtc::FrameEncryptorInterface> frame_encryptor,
cricket::MediaChannel* media_channel,
bool stopped) {
if (media_channel && frame_encryptor && ssrc && !stopped) {
worker_thread->Invoke<void>(RTC_FROM_HERE, [&] {
media_channel->SetFrameEncryptor(ssrc, frame_encryptor);
});
}
}
void RemoveEncodingLayers(const std::vector<std::string>& rids,
std::vector<RtpEncodingParameters>* encodings) {
RTC_DCHECK(encodings);
@ -134,209 +118,13 @@ bool UnimplementedRtpParameterHasValue(const RtpParameters& parameters) {
return false;
}
LocalAudioSinkAdapter::LocalAudioSinkAdapter() : sink_(nullptr) {}
LocalAudioSinkAdapter::~LocalAudioSinkAdapter() {
rtc::CritScope lock(&lock_);
if (sink_)
sink_->OnClose();
}
void LocalAudioSinkAdapter::OnData(const void* audio_data,
int bits_per_sample,
int sample_rate,
size_t number_of_channels,
size_t number_of_frames) {
rtc::CritScope lock(&lock_);
if (sink_) {
sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
number_of_frames);
}
}
void LocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) {
rtc::CritScope lock(&lock_);
RTC_DCHECK(!sink || !sink_);
sink_ = sink;
}
AudioRtpSender::AudioRtpSender(rtc::Thread* worker_thread,
const std::string& id,
StatsCollector* stats)
: worker_thread_(worker_thread),
id_(id),
stats_(stats),
dtmf_sender_proxy_(DtmfSenderProxy::Create(
rtc::Thread::Current(),
DtmfSender::Create(rtc::Thread::Current(), this))),
sink_adapter_(new LocalAudioSinkAdapter()) {
RtpSenderBase::RtpSenderBase(rtc::Thread* worker_thread, const std::string& id)
: worker_thread_(worker_thread), id_(id) {
RTC_DCHECK(worker_thread);
init_parameters_.encodings.emplace_back();
}
AudioRtpSender::~AudioRtpSender() {
// For DtmfSender.
SignalDestroyed();
Stop();
}
bool AudioRtpSender::CanInsertDtmf() {
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "CanInsertDtmf: No audio channel exists.";
return false;
}
// Check that this RTP sender is active (description has been applied that
// matches an SSRC to its ID).
if (!ssrc_) {
RTC_LOG(LS_ERROR) << "CanInsertDtmf: Sender does not have SSRC.";
return false;
}
return worker_thread_->Invoke<bool>(
RTC_FROM_HERE, [&] { return media_channel_->CanInsertDtmf(); });
}
bool AudioRtpSender::InsertDtmf(int code, int duration) {
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "InsertDtmf: No audio channel exists.";
return false;
}
if (!ssrc_) {
RTC_LOG(LS_ERROR) << "InsertDtmf: Sender does not have SSRC.";
return false;
}
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return media_channel_->InsertDtmf(ssrc_, code, duration);
});
if (!success) {
RTC_LOG(LS_ERROR) << "Failed to insert DTMF to channel.";
}
return success;
}
sigslot::signal0<>* AudioRtpSender::GetOnDestroyedSignal() {
return &SignalDestroyed;
}
void AudioRtpSender::OnChanged() {
TRACE_EVENT0("webrtc", "AudioRtpSender::OnChanged");
RTC_DCHECK(!stopped_);
if (cached_track_enabled_ != track_->enabled()) {
cached_track_enabled_ = track_->enabled();
if (can_send_track()) {
SetAudioSend();
}
}
}
bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
TRACE_EVENT0("webrtc", "AudioRtpSender::SetTrack");
if (stopped_) {
RTC_LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
return false;
}
if (track && track->kind() != MediaStreamTrackInterface::kAudioKind) {
RTC_LOG(LS_ERROR) << "SetTrack called on audio RtpSender with "
<< track->kind() << " track.";
return false;
}
AudioTrackInterface* audio_track = static_cast<AudioTrackInterface*>(track);
// Detach from old track.
if (track_) {
track_->RemoveSink(sink_adapter_.get());
track_->UnregisterObserver(this);
}
if (can_send_track() && stats_) {
stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
}
// Attach to new track.
bool prev_can_send_track = can_send_track();
// Keep a reference to the old track to keep it alive until we call
// SetAudioSend.
rtc::scoped_refptr<AudioTrackInterface> old_track = track_;
track_ = audio_track;
if (track_) {
cached_track_enabled_ = track_->enabled();
track_->RegisterObserver(this);
track_->AddSink(sink_adapter_.get());
}
// Update audio channel.
if (can_send_track()) {
SetAudioSend();
if (stats_) {
stats_->AddLocalAudioTrack(track_.get(), ssrc_);
}
} else if (prev_can_send_track) {
ClearAudioSend();
}
attachment_id_ = (track_ ? GenerateUniqueId() : 0);
return true;
}
RtpParameters AudioRtpSender::GetParameters() const {
if (stopped_) {
return RtpParameters();
}
if (!media_channel_ || !ssrc_) {
RtpParameters result = init_parameters_;
last_transaction_id_ = rtc::CreateRandomUuid();
result.transaction_id = last_transaction_id_.value();
return result;
}
return worker_thread_->Invoke<RtpParameters>(RTC_FROM_HERE, [&] {
RtpParameters result = media_channel_->GetRtpSendParameters(ssrc_);
last_transaction_id_ = rtc::CreateRandomUuid();
result.transaction_id = last_transaction_id_.value();
return result;
});
}
RTCError AudioRtpSender::SetParameters(const RtpParameters& parameters) {
TRACE_EVENT0("webrtc", "AudioRtpSender::SetParameters");
if (stopped_) {
return RTCError(RTCErrorType::INVALID_STATE);
}
if (!last_transaction_id_) {
LOG_AND_RETURN_ERROR(
RTCErrorType::INVALID_STATE,
"Failed to set parameters since getParameters() has never been called"
" on this sender");
}
if (last_transaction_id_ != parameters.transaction_id) {
LOG_AND_RETURN_ERROR(
RTCErrorType::INVALID_MODIFICATION,
"Failed to set parameters since the transaction_id doesn't match"
" the last value returned from getParameters()");
}
if (UnimplementedRtpParameterHasValue(parameters)) {
LOG_AND_RETURN_ERROR(
RTCErrorType::UNSUPPORTED_PARAMETER,
"Attempted to set an unimplemented parameter of RtpParameters.");
}
if (!media_channel_ || !ssrc_) {
auto result = cricket::CheckRtpParametersInvalidModificationAndValues(
init_parameters_, parameters);
if (result.ok()) {
init_parameters_ = parameters;
}
return result;
}
return worker_thread_->Invoke<RTCError>(RTC_FROM_HERE, [&] {
RTCError result = media_channel_->SetRtpSendParameters(ssrc_, parameters);
last_transaction_id_.reset();
return result;
});
}
rtc::scoped_refptr<DtmfSenderInterface> AudioRtpSender::GetDtmfSender() const {
return dtmf_sender_proxy_;
}
void AudioRtpSender::SetFrameEncryptor(
void RtpSenderBase::SetFrameEncryptor(
rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor) {
frame_encryptor_ = std::move(frame_encryptor);
// Special Case: Set the frame encryptor to any value on any existing channel.
@ -347,198 +135,13 @@ void AudioRtpSender::SetFrameEncryptor(
}
}
rtc::scoped_refptr<FrameEncryptorInterface> AudioRtpSender::GetFrameEncryptor()
const {
return frame_encryptor_;
}
void AudioRtpSender::SetSsrc(uint32_t ssrc) {
TRACE_EVENT0("webrtc", "AudioRtpSender::SetSsrc");
if (stopped_ || ssrc == ssrc_) {
return;
}
// If we are already sending with a particular SSRC, stop sending.
if (can_send_track()) {
ClearAudioSend();
if (stats_) {
stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
}
}
ssrc_ = ssrc;
if (can_send_track()) {
SetAudioSend();
if (stats_) {
stats_->AddLocalAudioTrack(track_.get(), ssrc_);
}
}
if (!init_parameters_.encodings.empty()) {
worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
RTC_DCHECK(media_channel_);
// Get the current parameters, which are constructed from the SDP.
// The number of layers in the SDP is currently authoritative to support
// SDP munging for Plan-B simulcast with "a=ssrc-group:SIM <ssrc-id>..."
// lines as described in RFC 5576.
// All fields should be default constructed and the SSRC field set, which
// we need to copy.
RtpParameters current_parameters =
media_channel_->GetRtpSendParameters(ssrc_);
for (size_t i = 0; i < init_parameters_.encodings.size(); ++i) {
init_parameters_.encodings[i].ssrc =
current_parameters.encodings[i].ssrc;
current_parameters.encodings[i] = init_parameters_.encodings[i];
}
current_parameters.degradation_preference =
init_parameters_.degradation_preference;
media_channel_->SetRtpSendParameters(ssrc_, current_parameters);
init_parameters_.encodings.clear();
});
}
// Each time there is an ssrc update.
MaybeAttachFrameEncryptorToMediaChannel(
ssrc_, worker_thread_, frame_encryptor_, media_channel_, stopped_);
}
void AudioRtpSender::Stop() {
TRACE_EVENT0("webrtc", "AudioRtpSender::Stop");
// TODO(deadbeef): Need to do more here to fully stop sending packets.
if (stopped_) {
return;
}
if (track_) {
track_->RemoveSink(sink_adapter_.get());
track_->UnregisterObserver(this);
}
if (can_send_track()) {
ClearAudioSend();
if (stats_) {
stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
}
}
media_channel_ = nullptr;
stopped_ = true;
}
void AudioRtpSender::SetMediaChannel(cricket::MediaChannel* media_channel) {
void RtpSenderBase::SetMediaChannel(cricket::MediaChannel* media_channel) {
RTC_DCHECK(media_channel == nullptr ||
media_channel->media_type() == media_type());
media_channel_ = static_cast<cricket::VoiceMediaChannel*>(media_channel);
media_channel_ = media_channel;
}
void AudioRtpSender::SetAudioSend() {
RTC_DCHECK(!stopped_);
RTC_DCHECK(can_send_track());
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "SetAudioSend: No audio channel exists.";
return;
}
cricket::AudioOptions options;
#if !defined(WEBRTC_CHROMIUM_BUILD) && !defined(WEBRTC_WEBKIT_BUILD)
// TODO(tommi): Remove this hack when we move CreateAudioSource out of
// PeerConnection. This is a bit of a strange way to apply local audio
// options since it is also applied to all streams/channels, local or remote.
if (track_->enabled() && track_->GetSource() &&
!track_->GetSource()->remote()) {
options = track_->GetSource()->options();
}
#endif
// |track_->enabled()| hops to the signaling thread, so call it before we hop
// to the worker thread or else it will deadlock.
bool track_enabled = track_->enabled();
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return media_channel_->SetAudioSend(ssrc_, track_enabled, &options,
sink_adapter_.get());
});
if (!success) {
RTC_LOG(LS_ERROR) << "SetAudioSend: ssrc is incorrect: " << ssrc_;
}
}
void AudioRtpSender::ClearAudioSend() {
RTC_DCHECK(ssrc_ != 0);
RTC_DCHECK(!stopped_);
if (!media_channel_) {
RTC_LOG(LS_WARNING) << "ClearAudioSend: No audio channel exists.";
return;
}
cricket::AudioOptions options;
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return media_channel_->SetAudioSend(ssrc_, false, &options, nullptr);
});
if (!success) {
RTC_LOG(LS_WARNING) << "ClearAudioSend: ssrc is incorrect: " << ssrc_;
}
}
RTCError AudioRtpSender::DisableEncodingLayers(
const std::vector<std::string>& rids) {
// Multiple encoding layers (and simulcast) are not supported in audio.
return rids.empty() ? RTCError::OK()
: RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
VideoRtpSender::VideoRtpSender(rtc::Thread* worker_thread,
const std::string& id)
: worker_thread_(worker_thread), id_(id) {
RTC_DCHECK(worker_thread);
init_parameters_.encodings.emplace_back();
}
VideoRtpSender::~VideoRtpSender() {
Stop();
}
void VideoRtpSender::OnChanged() {
TRACE_EVENT0("webrtc", "VideoRtpSender::OnChanged");
RTC_DCHECK(!stopped_);
if (cached_track_content_hint_ != track_->content_hint()) {
cached_track_content_hint_ = track_->content_hint();
if (can_send_track()) {
SetVideoSend();
}
}
}
bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
TRACE_EVENT0("webrtc", "VideoRtpSender::SetTrack");
if (stopped_) {
RTC_LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
return false;
}
if (track && track->kind() != MediaStreamTrackInterface::kVideoKind) {
RTC_LOG(LS_ERROR) << "SetTrack called on video RtpSender with "
<< track->kind() << " track.";
return false;
}
VideoTrackInterface* video_track = static_cast<VideoTrackInterface*>(track);
// Detach from old track.
if (track_) {
track_->UnregisterObserver(this);
}
// Attach to new track.
bool prev_can_send_track = can_send_track();
// Keep a reference to the old track to keep it alive until we call
// SetVideoSend.
rtc::scoped_refptr<VideoTrackInterface> old_track = track_;
track_ = video_track;
if (track_) {
cached_track_content_hint_ = track_->content_hint();
track_->RegisterObserver(this);
}
// Update video channel.
if (can_send_track()) {
SetVideoSend();
} else if (prev_can_send_track) {
ClearVideoSend();
}
attachment_id_ = (track_ ? GenerateUniqueId() : 0);
return true;
}
RtpParameters VideoRtpSender::GetParameters() const {
RtpParameters RtpSenderBase::GetParameters() const {
if (stopped_) {
return RtpParameters();
}
@ -557,8 +160,8 @@ RtpParameters VideoRtpSender::GetParameters() const {
});
}
RTCError VideoRtpSender::SetParameters(const RtpParameters& parameters) {
TRACE_EVENT0("webrtc", "VideoRtpSender::SetParameters");
RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) {
TRACE_EVENT0("webrtc", "RtpSenderBase::SetParameters");
if (stopped_) {
return RTCError(RTCErrorType::INVALID_STATE);
}
@ -604,39 +207,61 @@ RTCError VideoRtpSender::SetParameters(const RtpParameters& parameters) {
});
}
rtc::scoped_refptr<DtmfSenderInterface> VideoRtpSender::GetDtmfSender() const {
RTC_LOG(LS_ERROR) << "Tried to get DTMF sender from video sender.";
return nullptr;
}
void VideoRtpSender::SetFrameEncryptor(
rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor) {
frame_encryptor_ = std::move(frame_encryptor);
// Special Case: Set the frame encryptor to any value on any existing channel.
if (media_channel_ && ssrc_ && !stopped_) {
worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
media_channel_->SetFrameEncryptor(ssrc_, frame_encryptor_);
});
bool RtpSenderBase::SetTrack(MediaStreamTrackInterface* track) {
TRACE_EVENT0("webrtc", "RtpSenderBase::SetTrack");
if (stopped_) {
RTC_LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
return false;
}
if (track && track->kind() != track_kind()) {
RTC_LOG(LS_ERROR) << "SetTrack with " << track->kind()
<< " called on RtpSender with " << track_kind()
<< " track.";
return false;
}
// Detach from old track.
if (track_) {
DetachTrack();
track_->UnregisterObserver(this);
RemoveTrackFromStats();
}
// Attach to new track.
bool prev_can_send_track = can_send_track();
// Keep a reference to the old track to keep it alive until we call SetSend.
rtc::scoped_refptr<MediaStreamTrackInterface> old_track = track_;
track_ = track;
if (track_) {
track_->RegisterObserver(this);
AttachTrack();
}
// Update channel.
if (can_send_track()) {
SetSend();
AddTrackToStats();
} else if (prev_can_send_track) {
ClearSend();
}
attachment_id_ = (track_ ? GenerateUniqueId() : 0);
return true;
}
rtc::scoped_refptr<FrameEncryptorInterface> VideoRtpSender::GetFrameEncryptor()
const {
return frame_encryptor_;
}
void VideoRtpSender::SetSsrc(uint32_t ssrc) {
TRACE_EVENT0("webrtc", "VideoRtpSender::SetSsrc");
void RtpSenderBase::SetSsrc(uint32_t ssrc) {
TRACE_EVENT0("webrtc", "RtpSenderBase::SetSsrc");
if (stopped_ || ssrc == ssrc_) {
return;
}
// If we are already sending with a particular SSRC, stop sending.
if (can_send_track()) {
ClearVideoSend();
ClearSend();
RemoveTrackFromStats();
}
ssrc_ = ssrc;
if (can_send_track()) {
SetVideoSend();
SetSend();
AddTrackToStats();
}
if (!init_parameters_.encodings.empty()) {
worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
@ -649,6 +274,8 @@ void VideoRtpSender::SetSsrc(uint32_t ssrc) {
// we need to copy.
RtpParameters current_parameters =
media_channel_->GetRtpSendParameters(ssrc_);
RTC_DCHECK_GE(current_parameters.encodings.size(),
init_parameters_.encodings.size());
for (size_t i = 0; i < init_parameters_.encodings.size(); ++i) {
init_parameters_.encodings[i].ssrc =
current_parameters.encodings[i].ssrc;
@ -661,78 +288,31 @@ void VideoRtpSender::SetSsrc(uint32_t ssrc) {
init_parameters_.encodings.clear();
});
}
MaybeAttachFrameEncryptorToMediaChannel(
ssrc_, worker_thread_, frame_encryptor_, media_channel_, stopped_);
// Attempt to attach the frame encryptor to the current media channel.
if (frame_encryptor_) {
SetFrameEncryptor(frame_encryptor_);
}
}
void VideoRtpSender::Stop() {
TRACE_EVENT0("webrtc", "VideoRtpSender::Stop");
void RtpSenderBase::Stop() {
TRACE_EVENT0("webrtc", "RtpSenderBase::Stop");
// TODO(deadbeef): Need to do more here to fully stop sending packets.
if (stopped_) {
return;
}
if (track_) {
DetachTrack();
track_->UnregisterObserver(this);
}
if (can_send_track()) {
ClearVideoSend();
ClearSend();
RemoveTrackFromStats();
}
media_channel_ = nullptr;
stopped_ = true;
}
void VideoRtpSender::SetMediaChannel(cricket::MediaChannel* media_channel) {
RTC_DCHECK(media_channel == nullptr ||
media_channel->media_type() == media_type());
media_channel_ = static_cast<cricket::VideoMediaChannel*>(media_channel);
}
void VideoRtpSender::SetVideoSend() {
RTC_DCHECK(!stopped_);
RTC_DCHECK(can_send_track());
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "SetVideoSend: No video channel exists.";
return;
}
cricket::VideoOptions options;
VideoTrackSourceInterface* source = track_->GetSource();
if (source) {
options.is_screencast = source->is_screencast();
options.video_noise_reduction = source->needs_denoising();
}
switch (cached_track_content_hint_) {
case VideoTrackInterface::ContentHint::kNone:
break;
case VideoTrackInterface::ContentHint::kFluid:
options.is_screencast = false;
break;
case VideoTrackInterface::ContentHint::kDetailed:
case VideoTrackInterface::ContentHint::kText:
options.is_screencast = true;
break;
}
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return media_channel_->SetVideoSend(ssrc_, &options, track_);
});
RTC_DCHECK(success);
}
void VideoRtpSender::ClearVideoSend() {
RTC_DCHECK(ssrc_ != 0);
RTC_DCHECK(!stopped_);
if (!media_channel_) {
RTC_LOG(LS_WARNING) << "SetVideoSend: No video channel exists.";
return;
}
// Allow SetVideoSend to fail since |enable| is false and |source| is null.
// This is the normal case when the underlying media channel has already been
// deleted.
worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return media_channel_->SetVideoSend(ssrc_, nullptr, nullptr);
});
}
RTCError VideoRtpSender::DisableEncodingLayers(
RTCError RtpSenderBase::DisableEncodingLayers(
const std::vector<std::string>& rids) {
if (stopped_) {
return RTCError(RTCErrorType::INVALID_STATE);
@ -764,4 +344,256 @@ RTCError VideoRtpSender::DisableEncodingLayers(
return result;
}
LocalAudioSinkAdapter::LocalAudioSinkAdapter() : sink_(nullptr) {}
LocalAudioSinkAdapter::~LocalAudioSinkAdapter() {
rtc::CritScope lock(&lock_);
if (sink_)
sink_->OnClose();
}
void LocalAudioSinkAdapter::OnData(const void* audio_data,
int bits_per_sample,
int sample_rate,
size_t number_of_channels,
size_t number_of_frames) {
rtc::CritScope lock(&lock_);
if (sink_) {
sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
number_of_frames);
}
}
void LocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) {
rtc::CritScope lock(&lock_);
RTC_DCHECK(!sink || !sink_);
sink_ = sink;
}
rtc::scoped_refptr<AudioRtpSender> AudioRtpSender::Create(
rtc::Thread* worker_thread,
const std::string& id,
StatsCollector* stats) {
return rtc::scoped_refptr<AudioRtpSender>(
new rtc::RefCountedObject<AudioRtpSender>(worker_thread, id, stats));
}
AudioRtpSender::AudioRtpSender(rtc::Thread* worker_thread,
const std::string& id,
StatsCollector* stats)
: RtpSenderBase(worker_thread, id),
stats_(stats),
dtmf_sender_proxy_(DtmfSenderProxy::Create(
rtc::Thread::Current(),
DtmfSender::Create(rtc::Thread::Current(), this))),
sink_adapter_(new LocalAudioSinkAdapter()) {}
AudioRtpSender::~AudioRtpSender() {
// For DtmfSender.
SignalDestroyed();
Stop();
}
bool AudioRtpSender::CanInsertDtmf() {
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "CanInsertDtmf: No audio channel exists.";
return false;
}
// Check that this RTP sender is active (description has been applied that
// matches an SSRC to its ID).
if (!ssrc_) {
RTC_LOG(LS_ERROR) << "CanInsertDtmf: Sender does not have SSRC.";
return false;
}
return worker_thread_->Invoke<bool>(
RTC_FROM_HERE, [&] { return voice_media_channel()->CanInsertDtmf(); });
}
bool AudioRtpSender::InsertDtmf(int code, int duration) {
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "InsertDtmf: No audio channel exists.";
return false;
}
if (!ssrc_) {
RTC_LOG(LS_ERROR) << "InsertDtmf: Sender does not have SSRC.";
return false;
}
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return voice_media_channel()->InsertDtmf(ssrc_, code, duration);
});
if (!success) {
RTC_LOG(LS_ERROR) << "Failed to insert DTMF to channel.";
}
return success;
}
sigslot::signal0<>* AudioRtpSender::GetOnDestroyedSignal() {
return &SignalDestroyed;
}
void AudioRtpSender::OnChanged() {
TRACE_EVENT0("webrtc", "AudioRtpSender::OnChanged");
RTC_DCHECK(!stopped_);
if (cached_track_enabled_ != track_->enabled()) {
cached_track_enabled_ = track_->enabled();
if (can_send_track()) {
SetSend();
}
}
}
void AudioRtpSender::DetachTrack() {
RTC_DCHECK(track_);
audio_track()->RemoveSink(sink_adapter_.get());
}
void AudioRtpSender::AttachTrack() {
RTC_DCHECK(track_);
cached_track_enabled_ = track_->enabled();
audio_track()->AddSink(sink_adapter_.get());
}
void AudioRtpSender::AddTrackToStats() {
if (can_send_track() && stats_) {
stats_->AddLocalAudioTrack(audio_track().get(), ssrc_);
}
}
void AudioRtpSender::RemoveTrackFromStats() {
if (can_send_track() && stats_) {
stats_->RemoveLocalAudioTrack(audio_track().get(), ssrc_);
}
}
rtc::scoped_refptr<DtmfSenderInterface> AudioRtpSender::GetDtmfSender() const {
return dtmf_sender_proxy_;
}
void AudioRtpSender::SetSend() {
RTC_DCHECK(!stopped_);
RTC_DCHECK(can_send_track());
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "SetAudioSend: No audio channel exists.";
return;
}
cricket::AudioOptions options;
#if !defined(WEBRTC_CHROMIUM_BUILD) && !defined(WEBRTC_WEBKIT_BUILD)
// TODO(tommi): Remove this hack when we move CreateAudioSource out of
// PeerConnection. This is a bit of a strange way to apply local audio
// options since it is also applied to all streams/channels, local or remote.
if (track_->enabled() && audio_track()->GetSource() &&
!audio_track()->GetSource()->remote()) {
options = audio_track()->GetSource()->options();
}
#endif
// |track_->enabled()| hops to the signaling thread, so call it before we hop
// to the worker thread or else it will deadlock.
bool track_enabled = track_->enabled();
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return voice_media_channel()->SetAudioSend(ssrc_, track_enabled, &options,
sink_adapter_.get());
});
if (!success) {
RTC_LOG(LS_ERROR) << "SetAudioSend: ssrc is incorrect: " << ssrc_;
}
}
void AudioRtpSender::ClearSend() {
RTC_DCHECK(ssrc_ != 0);
RTC_DCHECK(!stopped_);
if (!media_channel_) {
RTC_LOG(LS_WARNING) << "ClearAudioSend: No audio channel exists.";
return;
}
cricket::AudioOptions options;
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return voice_media_channel()->SetAudioSend(ssrc_, false, &options, nullptr);
});
if (!success) {
RTC_LOG(LS_WARNING) << "ClearAudioSend: ssrc is incorrect: " << ssrc_;
}
}
rtc::scoped_refptr<VideoRtpSender> VideoRtpSender::Create(
rtc::Thread* worker_thread,
const std::string& id) {
return rtc::scoped_refptr<VideoRtpSender>(
new rtc::RefCountedObject<VideoRtpSender>(worker_thread, id));
}
VideoRtpSender::VideoRtpSender(rtc::Thread* worker_thread,
const std::string& id)
: RtpSenderBase(worker_thread, id) {}
VideoRtpSender::~VideoRtpSender() {
Stop();
}
void VideoRtpSender::OnChanged() {
TRACE_EVENT0("webrtc", "VideoRtpSender::OnChanged");
RTC_DCHECK(!stopped_);
if (cached_track_content_hint_ != video_track()->content_hint()) {
cached_track_content_hint_ = video_track()->content_hint();
if (can_send_track()) {
SetSend();
}
}
}
void VideoRtpSender::AttachTrack() {
RTC_DCHECK(track_);
cached_track_content_hint_ = video_track()->content_hint();
}
rtc::scoped_refptr<DtmfSenderInterface> VideoRtpSender::GetDtmfSender() const {
RTC_LOG(LS_ERROR) << "Tried to get DTMF sender from video sender.";
return nullptr;
}
void VideoRtpSender::SetSend() {
RTC_DCHECK(!stopped_);
RTC_DCHECK(can_send_track());
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "SetVideoSend: No video channel exists.";
return;
}
cricket::VideoOptions options;
VideoTrackSourceInterface* source = video_track()->GetSource();
if (source) {
options.is_screencast = source->is_screencast();
options.video_noise_reduction = source->needs_denoising();
}
switch (cached_track_content_hint_) {
case VideoTrackInterface::ContentHint::kNone:
break;
case VideoTrackInterface::ContentHint::kFluid:
options.is_screencast = false;
break;
case VideoTrackInterface::ContentHint::kDetailed:
case VideoTrackInterface::ContentHint::kText:
options.is_screencast = true;
break;
}
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return video_media_channel()->SetVideoSend(ssrc_, &options, video_track());
});
RTC_DCHECK(success);
}
void VideoRtpSender::ClearSend() {
RTC_DCHECK(ssrc_ != 0);
RTC_DCHECK(!stopped_);
if (!media_channel_) {
RTC_LOG(LS_WARNING) << "SetVideoSend: No video channel exists.";
return;
}
// Allow SetVideoSend to fail since |enable| is false and |source| is null.
// This is the normal case when the underlying media channel has already been
// deleted.
worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return video_media_channel()->SetVideoSend(ssrc_, nullptr, nullptr);
});
}
} // namespace webrtc
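
As a usage note, the new Create() factories are driven the same way the updated unit tests below drive them. The fragment that follows is a hedged sketch of that setup sequence; the worker thread, media channels, tracks, stats collector, and SSRC constants are assumed to exist in the surrounding fixture, and only the sender calls themselves are taken from this change.

// Sketch only: assumes a test-fixture-like context providing worker_thread,
// voice_media_channel, video_media_channel, audio_track, video_track, stats,
// kAudioSsrc and kVideoSsrc.
rtc::scoped_refptr<webrtc::AudioRtpSender> audio_sender =
    webrtc::AudioRtpSender::Create(worker_thread, "audio_sender_id", stats);
audio_sender->SetMediaChannel(voice_media_channel);  // must be a VoiceMediaChannel
audio_sender->SetTrack(audio_track);                 // attaches the local audio sink
audio_sender->set_stream_ids({"stream_id"});
audio_sender->SetSsrc(kAudioSsrc);                   // triggers SetSend() in the base class

rtc::scoped_refptr<webrtc::VideoRtpSender> video_sender =
    webrtc::VideoRtpSender::Create(worker_thread, "video_sender_id");
video_sender->SetMediaChannel(video_media_channel);  // must be a VideoMediaChannel
video_sender->SetTrack(video_track);
video_sender->SetSsrc(kVideoSsrc);

// Both senders share RtpSenderBase::Stop(), which detaches the track and
// clears the send stream.
audio_sender->Stop();
video_sender->Stop();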


@ -66,6 +66,115 @@ class RtpSenderInternal : public RtpSenderInterface {
const std::vector<std::string>& rid) = 0;
};
// Shared implementation for RtpSenderInternal interface.
class RtpSenderBase : public RtpSenderInternal, public ObserverInterface {
public:
// Sets the underlying MediaEngine channel associated with this RtpSender.
// A VoiceMediaChannel should be used for audio RtpSenders and
// a VideoMediaChannel should be used for video RtpSenders.
// Must call SetMediaChannel(nullptr) before the media channel is destroyed.
void SetMediaChannel(cricket::MediaChannel* media_channel) override;
bool SetTrack(MediaStreamTrackInterface* track) override;
rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
return track_;
}
RtpParameters GetParameters() const override;
RTCError SetParameters(const RtpParameters& parameters) override;
// Used to set the SSRC of the sender, once a local description has been set.
// If |ssrc| is 0, this indicates that the sender should disconnect from the
// underlying transport (this occurs if the sender isn't seen in a local
// description).
void SetSsrc(uint32_t ssrc) override;
uint32_t ssrc() const override { return ssrc_; }
std::vector<std::string> stream_ids() const override { return stream_ids_; }
void set_stream_ids(const std::vector<std::string>& stream_ids) override {
stream_ids_ = stream_ids;
}
std::string id() const override { return id_; }
void set_init_send_encodings(
const std::vector<RtpEncodingParameters>& init_send_encodings) override {
init_parameters_.encodings = init_send_encodings;
}
std::vector<RtpEncodingParameters> init_send_encodings() const override {
return init_parameters_.encodings;
}
void set_transport(
rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) override {
dtls_transport_ = dtls_transport;
}
rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override {
return dtls_transport_;
}
void SetFrameEncryptor(
rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor) override;
rtc::scoped_refptr<FrameEncryptorInterface> GetFrameEncryptor()
const override {
return frame_encryptor_;
}
void Stop() override;
// Returns an ID that changes every time SetTrack() is called, but
// otherwise remains constant. Used to generate IDs for stats.
// The special value zero means that no track is attached.
int AttachmentId() const override { return attachment_id_; }
// Disables the layers identified by the specified RIDs.
// If the specified list is empty, this is a no-op.
RTCError DisableEncodingLayers(const std::vector<std::string>& rid) override;
protected:
RtpSenderBase(rtc::Thread* worker_thread, const std::string& id);
// TODO(nisse): Since SSRC == 0 is technically valid, figure out
// some other way to test if we have a valid SSRC.
bool can_send_track() const { return track_ && ssrc_; }
virtual std::string track_kind() const = 0;
// Enable sending on the media channel.
virtual void SetSend() = 0;
// Disable sending on the media channel.
virtual void ClearSend() = 0;
// Template method pattern to allow subclasses to add custom behavior for
// when tracks are attached, detached, and for adding tracks to statistics.
virtual void AttachTrack() {}
virtual void DetachTrack() {}
virtual void AddTrackToStats() {}
virtual void RemoveTrackFromStats() {}
rtc::Thread* worker_thread_;
uint32_t ssrc_ = 0;
bool stopped_ = false;
int attachment_id_ = 0;
const std::string id_;
std::vector<std::string> stream_ids_;
RtpParameters init_parameters_;
cricket::MediaChannel* media_channel_ = nullptr;
rtc::scoped_refptr<MediaStreamTrackInterface> track_;
rtc::scoped_refptr<DtlsTransportInterface> dtls_transport_;
rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor_;
// |last_transaction_id_| is used to verify that |SetParameters| is receiving
// the parameters object that was last returned from |GetParameters|.
// As such, it is used for internal verification and is not observable by the
// client. It is marked as mutable to enable |GetParameters| to be a
// const method.
mutable absl::optional<std::string> last_transaction_id_;
std::vector<std::string> disabled_rids_;
};
// LocalAudioSinkAdapter receives data callback as a sink to the local
// AudioTrack, and passes the data to the sink of AudioSource.
class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
@ -90,19 +199,15 @@ class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
rtc::CriticalSection lock_;
};
class AudioRtpSender : public DtmfProviderInterface,
public ObserverInterface,
public rtc::RefCountedObject<RtpSenderInternal> {
class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase {
public:
// StatsCollector provided so that Add/RemoveLocalAudioTrack can be called
// at the appropriate times.
// Construct an RtpSender for audio with the given sender ID.
// The sender is initialized with no track to send and no associated streams.
AudioRtpSender(rtc::Thread* worker_thread,
const std::string& id,
StatsCollector* stats);
// StatsCollector provided so that Add/RemoveLocalAudioTrack can be called
// at the appropriate times.
static rtc::scoped_refptr<AudioRtpSender> Create(rtc::Thread* worker_thread,
const std::string& id,
StatsCollector* stats);
virtual ~AudioRtpSender();
// DtmfSenderProvider implementation.
@ -113,197 +218,88 @@ class AudioRtpSender : public DtmfProviderInterface,
// ObserverInterface implementation.
void OnChanged() override;
// RtpSenderInterface implementation.
bool SetTrack(MediaStreamTrackInterface* track) override;
rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
return track_;
}
rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override {
return dtls_transport_;
}
uint32_t ssrc() const override { return ssrc_; }
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_AUDIO;
}
std::string id() const override { return id_; }
std::vector<std::string> stream_ids() const override { return stream_ids_; }
RtpParameters GetParameters() const override;
RTCError SetParameters(const RtpParameters& parameters) override;
std::string track_kind() const override {
return MediaStreamTrackInterface::kAudioKind;
}
rtc::scoped_refptr<DtmfSenderInterface> GetDtmfSender() const override;
void SetFrameEncryptor(
rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor) override;
protected:
AudioRtpSender(rtc::Thread* worker_thread,
const std::string& id,
StatsCollector* stats);
rtc::scoped_refptr<FrameEncryptorInterface> GetFrameEncryptor()
const override;
void SetSend() override;
void ClearSend() override;
// RtpSenderInternal implementation.
void SetSsrc(uint32_t ssrc) override;
void set_stream_ids(const std::vector<std::string>& stream_ids) override {
stream_ids_ = stream_ids;
}
void set_init_send_encodings(
const std::vector<RtpEncodingParameters>& init_send_encodings) override {
init_parameters_.encodings = init_send_encodings;
}
std::vector<RtpEncodingParameters> init_send_encodings() const override {
return init_parameters_.encodings;
}
void set_transport(
rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) override {
dtls_transport_ = dtls_transport;
}
void Stop() override;
int AttachmentId() const override { return attachment_id_; }
void SetMediaChannel(cricket::MediaChannel* media_channel) override;
RTCError DisableEncodingLayers(const std::vector<std::string>& rids) override;
// Hooks to allow custom logic when tracks are attached and detached.
void AttachTrack() override;
void DetachTrack() override;
void AddTrackToStats() override;
void RemoveTrackFromStats() override;
private:
// TODO(nisse): Since SSRC == 0 is technically valid, figure out
// some other way to test if we have a valid SSRC.
bool can_send_track() const { return track_ && ssrc_; }
// Helper function to construct options for
// AudioProviderInterface::SetAudioSend.
void SetAudioSend();
// Helper function to call SetAudioSend with "stop sending" parameters.
void ClearAudioSend();
cricket::VoiceMediaChannel* voice_media_channel() {
return static_cast<cricket::VoiceMediaChannel*>(media_channel_);
}
rtc::scoped_refptr<AudioTrackInterface> audio_track() const {
return rtc::scoped_refptr<AudioTrackInterface>(
static_cast<AudioTrackInterface*>(track_.get()));
}
sigslot::signal0<> SignalDestroyed;
rtc::Thread* const worker_thread_;
const std::string id_;
std::vector<std::string> stream_ids_;
RtpParameters init_parameters_;
cricket::VoiceMediaChannel* media_channel_ = nullptr;
StatsCollector* stats_ = nullptr;
rtc::scoped_refptr<AudioTrackInterface> track_;
rtc::scoped_refptr<DtlsTransportInterface> dtls_transport_;
rtc::scoped_refptr<DtmfSenderInterface> dtmf_sender_proxy_;
// |last_transaction_id_| is used to verify that |SetParameters| is receiving
// the parameters object that was last returned from |GetParameters|.
// As such, it is used for internal verification and is not observable by the
// the client. It is marked as mutable to enable |GetParameters| to be a
// const method.
mutable absl::optional<std::string> last_transaction_id_;
uint32_t ssrc_ = 0;
bool cached_track_enabled_ = false;
bool stopped_ = false;
// Used to pass the data callback from the |track_| to the other end of
// cricket::AudioSource.
std::unique_ptr<LocalAudioSinkAdapter> sink_adapter_;
int attachment_id_ = 0;
rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor_;
};
class VideoRtpSender : public ObserverInterface,
public rtc::RefCountedObject<RtpSenderInternal> {
class VideoRtpSender : public RtpSenderBase {
public:
// Construct an RtpSender for video with the given sender ID.
// The sender is initialized with no track to send and no associated streams.
VideoRtpSender(rtc::Thread* worker_thread, const std::string& id);
static rtc::scoped_refptr<VideoRtpSender> Create(rtc::Thread* worker_thread,
const std::string& id);
virtual ~VideoRtpSender();
// ObserverInterface implementation
void OnChanged() override;
// RtpSenderInterface implementation
bool SetTrack(MediaStreamTrackInterface* track) override;
rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
return track_;
}
uint32_t ssrc() const override { return ssrc_; }
rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override {
return dtls_transport_;
}
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_VIDEO;
}
std::string id() const override { return id_; }
std::vector<std::string> stream_ids() const override { return stream_ids_; }
void set_init_send_encodings(
const std::vector<RtpEncodingParameters>& init_send_encodings) override {
init_parameters_.encodings = init_send_encodings;
std::string track_kind() const override {
return MediaStreamTrackInterface::kVideoKind;
}
std::vector<RtpEncodingParameters> init_send_encodings() const override {
return init_parameters_.encodings;
}
void set_transport(
rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) override {
dtls_transport_ = dtls_transport;
}
RtpParameters GetParameters() const override;
RTCError SetParameters(const RtpParameters& parameters) override;
rtc::scoped_refptr<DtmfSenderInterface> GetDtmfSender() const override;
void SetFrameEncryptor(
rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor) override;
protected:
VideoRtpSender(rtc::Thread* worker_thread, const std::string& id);
rtc::scoped_refptr<FrameEncryptorInterface> GetFrameEncryptor()
const override;
void SetSend() override;
void ClearSend() override;
// RtpSenderInternal implementation.
void SetSsrc(uint32_t ssrc) override;
void set_stream_ids(const std::vector<std::string>& stream_ids) override {
stream_ids_ = stream_ids;
}
void Stop() override;
int AttachmentId() const override { return attachment_id_; }
void SetMediaChannel(cricket::MediaChannel* media_channel) override;
RTCError DisableEncodingLayers(const std::vector<std::string>& rids) override;
// Hook to allow custom logic when tracks are attached.
void AttachTrack() override;
private:
bool can_send_track() const { return track_ && ssrc_; }
// Helper function to construct options for
// VideoProviderInterface::SetVideoSend.
void SetVideoSend();
// Helper function to call SetVideoSend with "stop sending" parameters.
void ClearVideoSend();
cricket::VideoMediaChannel* video_media_channel() {
return static_cast<cricket::VideoMediaChannel*>(media_channel_);
}
rtc::scoped_refptr<VideoTrackInterface> video_track() const {
return rtc::scoped_refptr<VideoTrackInterface>(
static_cast<VideoTrackInterface*>(track_.get()));
}
rtc::Thread* worker_thread_;
const std::string id_;
std::vector<std::string> stream_ids_;
RtpParameters init_parameters_;
cricket::VideoMediaChannel* media_channel_ = nullptr;
rtc::scoped_refptr<VideoTrackInterface> track_;
// |last_transaction_id_| is used to verify that |SetParameters| is receiving
// the parameters object that was last returned from |GetParameters|.
// As such, it is used for internal verification and is not observable by the
// the client. It is marked as mutable to enable |GetParameters| to be a
// const method.
mutable absl::optional<std::string> last_transaction_id_;
uint32_t ssrc_ = 0;
VideoTrackInterface::ContentHint cached_track_content_hint_ =
VideoTrackInterface::ContentHint::kNone;
bool stopped_ = false;
int attachment_id_ = 0;
rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor_;
rtc::scoped_refptr<DtlsTransportInterface> dtls_transport_;
std::vector<std::string> disabled_rids_;
};
} // namespace webrtc
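
The protected hooks above are the whole extension surface for a new sender type. As an illustration (not part of this change, and with the exact set of remaining interface methods inferred from the audio and video senders in this diff, so possibly not exhaustive), a hypothetical subclass might look like this:

// Illustrative only. Header path assumed; adjust to wherever RtpSenderBase is
// declared (pc/rtp_sender.h in this change's terminology).
#include "pc/rtp_sender.h"

class FakeRtpSender : public webrtc::RtpSenderBase {
 public:
  FakeRtpSender(rtc::Thread* worker_thread, const std::string& id)
      : RtpSenderBase(worker_thread, id) {}

  // RtpSenderInterface methods that stay per-media-type, mirroring
  // AudioRtpSender/VideoRtpSender above.
  cricket::MediaType media_type() const override {
    return cricket::MEDIA_TYPE_AUDIO;  // whichever media this sender handles
  }
  rtc::scoped_refptr<webrtc::DtmfSenderInterface> GetDtmfSender() const override {
    return nullptr;  // only the audio sender exposes a real DTMF sender
  }

  // ObserverInterface: react to track changes (e.g. enabled state).
  void OnChanged() override {}

 protected:
  // The media-specific hooks required by RtpSenderBase.
  std::string track_kind() const override {
    return webrtc::MediaStreamTrackInterface::kAudioKind;
  }
  void SetSend() override { /* enable sending on media_channel_ for ssrc_ */ }
  void ClearSend() override { /* stop sending on media_channel_ for ssrc_ */ }

  // The optional template-method hooks (AttachTrack, DetachTrack,
  // AddTrackToStats, RemoveTrackFromStats) keep their empty defaults here.
};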


@ -185,7 +185,7 @@ class RtpSenderReceiverTest
audio_track_ = AudioTrack::Create(kAudioTrackId, source);
EXPECT_TRUE(local_stream_->AddTrack(audio_track_));
audio_rtp_sender_ =
new AudioRtpSender(worker_thread_, audio_track_->id(), nullptr);
AudioRtpSender::Create(worker_thread_, audio_track_->id(), nullptr);
ASSERT_TRUE(audio_rtp_sender_->SetTrack(audio_track_));
audio_rtp_sender_->set_stream_ids({local_stream_->id()});
audio_rtp_sender_->SetMediaChannel(voice_media_channel_);
@ -196,7 +196,8 @@ class RtpSenderReceiverTest
}
void CreateAudioRtpSenderWithNoTrack() {
audio_rtp_sender_ = new AudioRtpSender(worker_thread_, /*id=*/"", nullptr);
audio_rtp_sender_ =
AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr);
audio_rtp_sender_->SetMediaChannel(voice_media_channel_);
}
@ -244,7 +245,8 @@ class RtpSenderReceiverTest
void CreateVideoRtpSender(bool is_screencast, uint32_t ssrc = kVideoSsrc) {
AddVideoTrack(is_screencast);
video_rtp_sender_ = new VideoRtpSender(worker_thread_, video_track_->id());
video_rtp_sender_ =
VideoRtpSender::Create(worker_thread_, video_track_->id());
ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_));
video_rtp_sender_->set_stream_ids({local_stream_->id()});
video_rtp_sender_->SetMediaChannel(video_media_channel_);
@ -252,7 +254,7 @@ class RtpSenderReceiverTest
VerifyVideoChannelInput(ssrc);
}
void CreateVideoRtpSenderWithNoTrack() {
video_rtp_sender_ = new VideoRtpSender(worker_thread_, /*id=*/"");
video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, /*id=*/"");
video_rtp_sender_->SetMediaChannel(video_media_channel_);
}
@ -423,15 +425,15 @@ class RtpSenderReceiverTest
void RunDisableSimulcastLayersWithoutMediaEngineTest(
const std::vector<std::string>& all_layers,
const std::vector<std::string>& disabled_layers) {
VideoRtpSender sender(rtc::Thread::Current(), "1");
auto sender = VideoRtpSender::Create(rtc::Thread::Current(), "1");
RtpParameters parameters;
parameters.encodings.resize(all_layers.size());
for (size_t i = 0; i < all_layers.size(); ++i) {
parameters.encodings[i].rid = all_layers[i];
}
sender.set_init_send_encodings(parameters.encodings);
RunDisableEncodingLayersTest(all_layers, disabled_layers, &sender);
RunSetLastLayerAsInactiveTest(&sender);
sender->set_init_send_encodings(parameters.encodings);
RunDisableEncodingLayersTest(all_layers, disabled_layers, sender.get());
RunSetLastLayerAsInactiveTest(sender.get());
}
// Runs a test for disabling the encoding layers on a sender with a media
@ -886,7 +888,8 @@ TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParameters) {
}
TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersBeforeNegotiation) {
audio_rtp_sender_ = new AudioRtpSender(worker_thread_, /*id=*/"", nullptr);
audio_rtp_sender_ =
AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr);
RtpParameters params = audio_rtp_sender_->GetParameters();
ASSERT_EQ(1u, params.encodings.size());
@ -905,7 +908,7 @@ TEST_F(RtpSenderReceiverTest, AudioSenderInitParametersMovedAfterNegotiation) {
EXPECT_TRUE(local_stream_->AddTrack(audio_track_));
audio_rtp_sender_ =
new AudioRtpSender(worker_thread_, audio_track_->id(), nullptr);
AudioRtpSender::Create(worker_thread_, audio_track_->id(), nullptr);
ASSERT_TRUE(audio_rtp_sender_->SetTrack(audio_track_));
audio_rtp_sender_->set_stream_ids({local_stream_->id()});
@ -934,7 +937,8 @@ TEST_F(RtpSenderReceiverTest, AudioSenderInitParametersMovedAfterNegotiation) {
TEST_F(RtpSenderReceiverTest,
AudioSenderMustCallGetParametersBeforeSetParametersBeforeNegotiation) {
audio_rtp_sender_ = new AudioRtpSender(worker_thread_, /*id=*/"", nullptr);
audio_rtp_sender_ =
AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr);
RtpParameters params;
RTCError result = audio_rtp_sender_->SetParameters(params);
@ -1113,7 +1117,7 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParameters) {
}
TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersBeforeNegotiation) {
video_rtp_sender_ = new VideoRtpSender(worker_thread_, /*id=*/"");
video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, /*id=*/"");
RtpParameters params = video_rtp_sender_->GetParameters();
ASSERT_EQ(1u, params.encodings.size());
@ -1130,7 +1134,8 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersBeforeNegotiation) {
TEST_F(RtpSenderReceiverTest, VideoSenderInitParametersMovedAfterNegotiation) {
AddVideoTrack(false);
video_rtp_sender_ = new VideoRtpSender(worker_thread_, video_track_->id());
video_rtp_sender_ =
VideoRtpSender::Create(worker_thread_, video_track_->id());
ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_));
video_rtp_sender_->set_stream_ids({local_stream_->id()});
@ -1167,7 +1172,8 @@ TEST_F(RtpSenderReceiverTest,
VideoSenderInitParametersMovedAfterManualSimulcastAndNegotiation) {
AddVideoTrack(false);
video_rtp_sender_ = new VideoRtpSender(worker_thread_, video_track_->id());
video_rtp_sender_ =
VideoRtpSender::Create(worker_thread_, video_track_->id());
ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_));
video_rtp_sender_->set_stream_ids({local_stream_->id()});
@ -1200,7 +1206,7 @@ TEST_F(RtpSenderReceiverTest,
TEST_F(RtpSenderReceiverTest,
VideoSenderMustCallGetParametersBeforeSetParametersBeforeNegotiation) {
video_rtp_sender_ = new VideoRtpSender(worker_thread_, /*id=*/"");
video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, /*id=*/"");
RtpParameters params;
RTCError result = video_rtp_sender_->SetParameters(params);
@ -1592,7 +1598,8 @@ TEST_F(RtpSenderReceiverTest,
// Setting detailed overrides the default non-screencast mode. This should be
// applied even if the track is set on construction.
video_track_->set_content_hint(VideoTrackInterface::ContentHint::kDetailed);
video_rtp_sender_ = new VideoRtpSender(worker_thread_, video_track_->id());
video_rtp_sender_ =
VideoRtpSender::Create(worker_thread_, video_track_->id());
ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_));
video_rtp_sender_->set_stream_ids({local_stream_->id()});
video_rtp_sender_->SetMediaChannel(video_media_channel_);