Exposing RtpSenders and RtpReceivers from PeerConnection.

This CL essentially converts [Local|Remote]TrackHandler to Rtp[Sender|Receiver], and adds a "SetTrack" method for RtpSender. It also gets rid of MediaStreamHandler and MediaStreamHandlerContainer, since these classes weren't really anything more than containers. PeerConnection now manages the RtpSenders and RtpReceivers directly.

Review URL: https://codereview.webrtc.org/1351803002

Cr-Commit-Position: refs/heads/master@{#10100}
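Not part of the commit itself, but for orientation: a minimal sketch of how the newly exposed surface could be used, assuming SetTrack() is declared on RtpSenderInterface as the description above says; the helper function, its name, and the surrounding setup are hypothetical.

#include "talk/app/webrtc/peerconnectioninterface.h"

// Illustrative only: swap the track on the first audio sender, assuming |pc|
// is a live PeerConnectionInterface and |new_audio_track| was created by the
// application.
void ReplaceFirstAudioSenderTrack(webrtc::PeerConnectionInterface* pc,
                                  webrtc::AudioTrackInterface* new_audio_track) {
  // GetSenders() returns proxies for the RtpSenders that PeerConnection now
  // manages directly (previously hidden inside MediaStreamHandlerContainer).
  for (const auto& sender : pc->GetSenders()) {
    if (sender->track() && sender->track()->kind() == "audio") {
      // SetTrack() is the new RtpSender method added by this CL; it is
      // expected to return false if the track kind does not match the sender.
      sender->SetTrack(new_audio_track);
      break;
    }
  }
  // Receivers are exposed symmetrically via GetReceivers(); each one carries
  // the remote track and can be stopped with Stop().
}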
@ -25,462 +25,5 @@
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#include "talk/app/webrtc/mediastreamhandler.h"
|
||||
|
||||
#include "talk/app/webrtc/localaudiosource.h"
|
||||
#include "talk/app/webrtc/videosource.h"
|
||||
#include "talk/app/webrtc/videosourceinterface.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
TrackHandler::TrackHandler(MediaStreamTrackInterface* track, uint32 ssrc)
|
||||
: track_(track),
|
||||
ssrc_(ssrc),
|
||||
state_(track->state()),
|
||||
enabled_(track->enabled()) {
|
||||
track_->RegisterObserver(this);
|
||||
}
|
||||
|
||||
TrackHandler::~TrackHandler() {
|
||||
track_->UnregisterObserver(this);
|
||||
}
|
||||
|
||||
void TrackHandler::OnChanged() {
|
||||
if (state_ != track_->state()) {
|
||||
state_ = track_->state();
|
||||
OnStateChanged();
|
||||
}
|
||||
if (enabled_ != track_->enabled()) {
|
||||
enabled_ = track_->enabled();
|
||||
OnEnabledChanged();
|
||||
}
|
||||
}
|
||||
|
||||
LocalAudioSinkAdapter::LocalAudioSinkAdapter() : sink_(NULL) {}
|
||||
|
||||
LocalAudioSinkAdapter::~LocalAudioSinkAdapter() {
|
||||
rtc::CritScope lock(&lock_);
|
||||
if (sink_)
|
||||
sink_->OnClose();
|
||||
}
|
||||
|
||||
void LocalAudioSinkAdapter::OnData(const void* audio_data,
|
||||
int bits_per_sample,
|
||||
int sample_rate,
|
||||
int number_of_channels,
|
||||
size_t number_of_frames) {
|
||||
rtc::CritScope lock(&lock_);
|
||||
if (sink_) {
|
||||
sink_->OnData(audio_data, bits_per_sample, sample_rate,
|
||||
number_of_channels, number_of_frames);
|
||||
}
|
||||
}
|
||||
|
||||
void LocalAudioSinkAdapter::SetSink(cricket::AudioRenderer::Sink* sink) {
|
||||
rtc::CritScope lock(&lock_);
|
||||
ASSERT(!sink || !sink_);
|
||||
sink_ = sink;
|
||||
}
|
||||
|
||||
LocalAudioTrackHandler::LocalAudioTrackHandler(
|
||||
AudioTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
AudioProviderInterface* provider)
|
||||
: TrackHandler(track, ssrc),
|
||||
audio_track_(track),
|
||||
provider_(provider),
|
||||
sink_adapter_(new LocalAudioSinkAdapter()) {
|
||||
OnEnabledChanged();
|
||||
track->AddSink(sink_adapter_.get());
|
||||
}
|
||||
|
||||
LocalAudioTrackHandler::~LocalAudioTrackHandler() {
|
||||
}
|
||||
|
||||
void LocalAudioTrackHandler::OnStateChanged() {
|
||||
// TODO(perkj): What should happen when the state changes?
|
||||
}
|
||||
|
||||
void LocalAudioTrackHandler::Stop() {
|
||||
audio_track_->RemoveSink(sink_adapter_.get());
|
||||
cricket::AudioOptions options;
|
||||
provider_->SetAudioSend(ssrc(), false, options, NULL);
|
||||
}
|
||||
|
||||
void LocalAudioTrackHandler::OnEnabledChanged() {
|
||||
cricket::AudioOptions options;
|
||||
if (audio_track_->enabled() && audio_track_->GetSource()) {
|
||||
// TODO(xians): Remove this static_cast since we should be able to connect
|
||||
// a remote audio track to peer connection.
|
||||
options = static_cast<LocalAudioSource*>(
|
||||
audio_track_->GetSource())->options();
|
||||
}
|
||||
|
||||
// Use the renderer if the audio track has one, otherwise use the sink
|
||||
// adapter owned by this class.
|
||||
cricket::AudioRenderer* renderer = audio_track_->GetRenderer() ?
|
||||
audio_track_->GetRenderer() : sink_adapter_.get();
|
||||
ASSERT(renderer != NULL);
|
||||
provider_->SetAudioSend(ssrc(), audio_track_->enabled(), options, renderer);
|
||||
}
|
||||
|
||||
RemoteAudioTrackHandler::RemoteAudioTrackHandler(
|
||||
AudioTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
AudioProviderInterface* provider)
|
||||
: TrackHandler(track, ssrc),
|
||||
audio_track_(track),
|
||||
provider_(provider) {
|
||||
track->GetSource()->RegisterAudioObserver(this);
|
||||
OnEnabledChanged();
|
||||
}
|
||||
|
||||
RemoteAudioTrackHandler::~RemoteAudioTrackHandler() {
|
||||
audio_track_->GetSource()->UnregisterAudioObserver(this);
|
||||
}
|
||||
|
||||
void RemoteAudioTrackHandler::Stop() {
|
||||
provider_->SetAudioPlayout(ssrc(), false, NULL);
|
||||
}
|
||||
|
||||
void RemoteAudioTrackHandler::OnStateChanged() {
|
||||
}
|
||||
|
||||
void RemoteAudioTrackHandler::OnEnabledChanged() {
|
||||
provider_->SetAudioPlayout(ssrc(), audio_track_->enabled(),
|
||||
audio_track_->GetRenderer());
|
||||
}
|
||||
|
||||
void RemoteAudioTrackHandler::OnSetVolume(double volume) {
|
||||
// When the track is disabled, the volume of the source, which is the
|
||||
// corresponding WebRtc Voice Engine channel will be 0. So we do not allow
|
||||
// setting the volume to the source when the track is disabled.
|
||||
if (audio_track_->enabled())
|
||||
provider_->SetAudioPlayoutVolume(ssrc(), volume);
|
||||
}
|
||||
|
||||
LocalVideoTrackHandler::LocalVideoTrackHandler(
|
||||
VideoTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
VideoProviderInterface* provider)
|
||||
: TrackHandler(track, ssrc),
|
||||
local_video_track_(track),
|
||||
provider_(provider) {
|
||||
VideoSourceInterface* source = local_video_track_->GetSource();
|
||||
if (source)
|
||||
provider_->SetCaptureDevice(ssrc, source->GetVideoCapturer());
|
||||
OnEnabledChanged();
|
||||
}
|
||||
|
||||
LocalVideoTrackHandler::~LocalVideoTrackHandler() {
|
||||
}
|
||||
|
||||
void LocalVideoTrackHandler::OnStateChanged() {
|
||||
}
|
||||
|
||||
void LocalVideoTrackHandler::Stop() {
|
||||
provider_->SetCaptureDevice(ssrc(), NULL);
|
||||
provider_->SetVideoSend(ssrc(), false, NULL);
|
||||
}
|
||||
|
||||
void LocalVideoTrackHandler::OnEnabledChanged() {
|
||||
const cricket::VideoOptions* options = NULL;
|
||||
VideoSourceInterface* source = local_video_track_->GetSource();
|
||||
if (local_video_track_->enabled() && source) {
|
||||
options = source->options();
|
||||
}
|
||||
provider_->SetVideoSend(ssrc(), local_video_track_->enabled(), options);
|
||||
}
|
||||
|
||||
RemoteVideoTrackHandler::RemoteVideoTrackHandler(
|
||||
VideoTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
VideoProviderInterface* provider)
|
||||
: TrackHandler(track, ssrc),
|
||||
remote_video_track_(track),
|
||||
provider_(provider) {
|
||||
OnEnabledChanged();
|
||||
provider_->SetVideoPlayout(ssrc, true,
|
||||
remote_video_track_->GetSource()->FrameInput());
|
||||
}
|
||||
|
||||
RemoteVideoTrackHandler::~RemoteVideoTrackHandler() {
|
||||
}
|
||||
|
||||
void RemoteVideoTrackHandler::Stop() {
|
||||
// Since cricket::VideoRenderer is not reference counted
|
||||
// we need to remove the renderer before we are deleted.
|
||||
provider_->SetVideoPlayout(ssrc(), false, NULL);
|
||||
}
|
||||
|
||||
void RemoteVideoTrackHandler::OnStateChanged() {
|
||||
}
|
||||
|
||||
void RemoteVideoTrackHandler::OnEnabledChanged() {
|
||||
}
|
||||
|
||||
MediaStreamHandler::MediaStreamHandler(MediaStreamInterface* stream,
|
||||
AudioProviderInterface* audio_provider,
|
||||
VideoProviderInterface* video_provider)
|
||||
: stream_(stream),
|
||||
audio_provider_(audio_provider),
|
||||
video_provider_(video_provider) {
|
||||
}
|
||||
|
||||
MediaStreamHandler::~MediaStreamHandler() {
|
||||
for (TrackHandlers::iterator it = track_handlers_.begin();
|
||||
it != track_handlers_.end(); ++it) {
|
||||
delete *it;
|
||||
}
|
||||
}
|
||||
|
||||
void MediaStreamHandler::RemoveTrack(MediaStreamTrackInterface* track) {
|
||||
for (TrackHandlers::iterator it = track_handlers_.begin();
|
||||
it != track_handlers_.end(); ++it) {
|
||||
if ((*it)->track() == track) {
|
||||
TrackHandler* track = *it;
|
||||
track->Stop();
|
||||
delete track;
|
||||
track_handlers_.erase(it);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TrackHandler* MediaStreamHandler::FindTrackHandler(
|
||||
MediaStreamTrackInterface* track) {
|
||||
TrackHandlers::iterator it = track_handlers_.begin();
|
||||
for (; it != track_handlers_.end(); ++it) {
|
||||
if ((*it)->track() == track) {
|
||||
return *it;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
MediaStreamInterface* MediaStreamHandler::stream() {
|
||||
return stream_.get();
|
||||
}
|
||||
|
||||
void MediaStreamHandler::OnChanged() {
|
||||
}
|
||||
|
||||
void MediaStreamHandler::Stop() {
|
||||
for (TrackHandlers::const_iterator it = track_handlers_.begin();
|
||||
it != track_handlers_.end(); ++it) {
|
||||
(*it)->Stop();
|
||||
}
|
||||
}
|
||||
|
||||
LocalMediaStreamHandler::LocalMediaStreamHandler(
|
||||
MediaStreamInterface* stream,
|
||||
AudioProviderInterface* audio_provider,
|
||||
VideoProviderInterface* video_provider)
|
||||
: MediaStreamHandler(stream, audio_provider, video_provider) {
|
||||
}
|
||||
|
||||
LocalMediaStreamHandler::~LocalMediaStreamHandler() {
|
||||
}
|
||||
|
||||
void LocalMediaStreamHandler::AddAudioTrack(AudioTrackInterface* audio_track,
|
||||
uint32 ssrc) {
|
||||
ASSERT(!FindTrackHandler(audio_track));
|
||||
|
||||
TrackHandler* handler(new LocalAudioTrackHandler(audio_track, ssrc,
|
||||
audio_provider_));
|
||||
track_handlers_.push_back(handler);
|
||||
}
|
||||
|
||||
void LocalMediaStreamHandler::AddVideoTrack(VideoTrackInterface* video_track,
|
||||
uint32 ssrc) {
|
||||
ASSERT(!FindTrackHandler(video_track));
|
||||
|
||||
TrackHandler* handler(new LocalVideoTrackHandler(video_track, ssrc,
|
||||
video_provider_));
|
||||
track_handlers_.push_back(handler);
|
||||
}
|
||||
|
||||
RemoteMediaStreamHandler::RemoteMediaStreamHandler(
|
||||
MediaStreamInterface* stream,
|
||||
AudioProviderInterface* audio_provider,
|
||||
VideoProviderInterface* video_provider)
|
||||
: MediaStreamHandler(stream, audio_provider, video_provider) {
|
||||
}
|
||||
|
||||
RemoteMediaStreamHandler::~RemoteMediaStreamHandler() {
|
||||
}
|
||||
|
||||
void RemoteMediaStreamHandler::AddAudioTrack(AudioTrackInterface* audio_track,
|
||||
uint32 ssrc) {
|
||||
ASSERT(!FindTrackHandler(audio_track));
|
||||
TrackHandler* handler(
|
||||
new RemoteAudioTrackHandler(audio_track, ssrc, audio_provider_));
|
||||
track_handlers_.push_back(handler);
|
||||
}
|
||||
|
||||
void RemoteMediaStreamHandler::AddVideoTrack(VideoTrackInterface* video_track,
|
||||
uint32 ssrc) {
|
||||
ASSERT(!FindTrackHandler(video_track));
|
||||
TrackHandler* handler(
|
||||
new RemoteVideoTrackHandler(video_track, ssrc, video_provider_));
|
||||
track_handlers_.push_back(handler);
|
||||
}
|
||||
|
||||
MediaStreamHandlerContainer::MediaStreamHandlerContainer(
|
||||
AudioProviderInterface* audio_provider,
|
||||
VideoProviderInterface* video_provider)
|
||||
: audio_provider_(audio_provider),
|
||||
video_provider_(video_provider) {
|
||||
}
|
||||
|
||||
MediaStreamHandlerContainer::~MediaStreamHandlerContainer() {
|
||||
ASSERT(remote_streams_handlers_.empty());
|
||||
ASSERT(local_streams_handlers_.empty());
|
||||
}
|
||||
|
||||
void MediaStreamHandlerContainer::TearDown() {
|
||||
for (StreamHandlerList::iterator it = remote_streams_handlers_.begin();
|
||||
it != remote_streams_handlers_.end(); ++it) {
|
||||
(*it)->Stop();
|
||||
delete *it;
|
||||
}
|
||||
remote_streams_handlers_.clear();
|
||||
for (StreamHandlerList::iterator it = local_streams_handlers_.begin();
|
||||
it != local_streams_handlers_.end(); ++it) {
|
||||
(*it)->Stop();
|
||||
delete *it;
|
||||
}
|
||||
local_streams_handlers_.clear();
|
||||
}
|
||||
|
||||
void MediaStreamHandlerContainer::RemoveRemoteStream(
|
||||
MediaStreamInterface* stream) {
|
||||
DeleteStreamHandler(&remote_streams_handlers_, stream);
|
||||
}
|
||||
|
||||
void MediaStreamHandlerContainer::AddRemoteAudioTrack(
|
||||
MediaStreamInterface* stream,
|
||||
AudioTrackInterface* audio_track,
|
||||
uint32 ssrc) {
|
||||
MediaStreamHandler* handler = FindStreamHandler(remote_streams_handlers_,
|
||||
stream);
|
||||
if (handler == NULL) {
|
||||
handler = CreateRemoteStreamHandler(stream);
|
||||
}
|
||||
handler->AddAudioTrack(audio_track, ssrc);
|
||||
}
|
||||
|
||||
void MediaStreamHandlerContainer::AddRemoteVideoTrack(
|
||||
MediaStreamInterface* stream,
|
||||
VideoTrackInterface* video_track,
|
||||
uint32 ssrc) {
|
||||
MediaStreamHandler* handler = FindStreamHandler(remote_streams_handlers_,
|
||||
stream);
|
||||
if (handler == NULL) {
|
||||
handler = CreateRemoteStreamHandler(stream);
|
||||
}
|
||||
handler->AddVideoTrack(video_track, ssrc);
|
||||
}
|
||||
|
||||
void MediaStreamHandlerContainer::RemoveRemoteTrack(
|
||||
MediaStreamInterface* stream,
|
||||
MediaStreamTrackInterface* track) {
|
||||
MediaStreamHandler* handler = FindStreamHandler(remote_streams_handlers_,
|
||||
stream);
|
||||
if (!VERIFY(handler != NULL)) {
|
||||
LOG(LS_WARNING) << "Local MediaStreamHandler for stream with id "
|
||||
<< stream->label() << "doesnt't exist.";
|
||||
return;
|
||||
}
|
||||
handler->RemoveTrack(track);
|
||||
}
|
||||
|
||||
void MediaStreamHandlerContainer::RemoveLocalStream(
|
||||
MediaStreamInterface* stream) {
|
||||
DeleteStreamHandler(&local_streams_handlers_, stream);
|
||||
}
|
||||
|
||||
void MediaStreamHandlerContainer::AddLocalAudioTrack(
|
||||
MediaStreamInterface* stream,
|
||||
AudioTrackInterface* audio_track,
|
||||
uint32 ssrc) {
|
||||
MediaStreamHandler* handler = FindStreamHandler(local_streams_handlers_,
|
||||
stream);
|
||||
if (handler == NULL) {
|
||||
handler = CreateLocalStreamHandler(stream);
|
||||
}
|
||||
handler->AddAudioTrack(audio_track, ssrc);
|
||||
}
|
||||
|
||||
void MediaStreamHandlerContainer::AddLocalVideoTrack(
|
||||
MediaStreamInterface* stream,
|
||||
VideoTrackInterface* video_track,
|
||||
uint32 ssrc) {
|
||||
MediaStreamHandler* handler = FindStreamHandler(local_streams_handlers_,
|
||||
stream);
|
||||
if (handler == NULL) {
|
||||
handler = CreateLocalStreamHandler(stream);
|
||||
}
|
||||
handler->AddVideoTrack(video_track, ssrc);
|
||||
}
|
||||
|
||||
void MediaStreamHandlerContainer::RemoveLocalTrack(
|
||||
MediaStreamInterface* stream,
|
||||
MediaStreamTrackInterface* track) {
|
||||
MediaStreamHandler* handler = FindStreamHandler(local_streams_handlers_,
|
||||
stream);
|
||||
if (!VERIFY(handler != NULL)) {
|
||||
LOG(LS_WARNING) << "Remote MediaStreamHandler for stream with id "
|
||||
<< stream->label() << "doesnt't exist.";
|
||||
return;
|
||||
}
|
||||
handler->RemoveTrack(track);
|
||||
}
|
||||
|
||||
MediaStreamHandler* MediaStreamHandlerContainer::CreateRemoteStreamHandler(
|
||||
MediaStreamInterface* stream) {
|
||||
ASSERT(!FindStreamHandler(remote_streams_handlers_, stream));
|
||||
|
||||
RemoteMediaStreamHandler* handler =
|
||||
new RemoteMediaStreamHandler(stream, audio_provider_, video_provider_);
|
||||
remote_streams_handlers_.push_back(handler);
|
||||
return handler;
|
||||
}
|
||||
|
||||
MediaStreamHandler* MediaStreamHandlerContainer::CreateLocalStreamHandler(
|
||||
MediaStreamInterface* stream) {
|
||||
ASSERT(!FindStreamHandler(local_streams_handlers_, stream));
|
||||
|
||||
LocalMediaStreamHandler* handler =
|
||||
new LocalMediaStreamHandler(stream, audio_provider_, video_provider_);
|
||||
local_streams_handlers_.push_back(handler);
|
||||
return handler;
|
||||
}
|
||||
|
||||
MediaStreamHandler* MediaStreamHandlerContainer::FindStreamHandler(
|
||||
const StreamHandlerList& handlers,
|
||||
MediaStreamInterface* stream) {
|
||||
StreamHandlerList::const_iterator it = handlers.begin();
|
||||
for (; it != handlers.end(); ++it) {
|
||||
if ((*it)->stream() == stream) {
|
||||
return *it;
|
||||
}
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void MediaStreamHandlerContainer::DeleteStreamHandler(
|
||||
StreamHandlerList* streamhandlers, MediaStreamInterface* stream) {
|
||||
StreamHandlerList::iterator it = streamhandlers->begin();
|
||||
for (; it != streamhandlers->end(); ++it) {
|
||||
if ((*it)->stream() == stream) {
|
||||
(*it)->Stop();
|
||||
delete *it;
|
||||
streamhandlers->erase(it);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
// TODO(deadbeef): Remove this file once Chrome build files no longer reference
|
||||
// it.
|
||||
|
||||
@ -25,269 +25,5 @@
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
// This file contains classes for listening on changes on MediaStreams and
|
||||
// MediaTracks that are connected to a certain PeerConnection.
|
||||
// Example: If a user sets a renderer on a remote video track the renderer is
|
||||
// connected to the appropriate remote video stream.
|
||||
|
||||
#ifndef TALK_APP_WEBRTC_MEDIASTREAMHANDLER_H_
|
||||
#define TALK_APP_WEBRTC_MEDIASTREAMHANDLER_H_
|
||||
|
||||
#include <list>
|
||||
#include <vector>
|
||||
|
||||
#include "talk/app/webrtc/mediastreaminterface.h"
|
||||
#include "talk/app/webrtc/mediastreamprovider.h"
|
||||
#include "talk/app/webrtc/peerconnectioninterface.h"
|
||||
#include "talk/media/base/audiorenderer.h"
|
||||
#include "webrtc/base/thread.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// TrackHandler listens to events on a MediaStreamTrackInterface that is
|
||||
// connected to a certain PeerConnection.
|
||||
class TrackHandler : public ObserverInterface {
|
||||
public:
|
||||
TrackHandler(MediaStreamTrackInterface* track, uint32 ssrc);
|
||||
virtual ~TrackHandler();
|
||||
virtual void OnChanged();
|
||||
// Stop using |track_| on this PeerConnection.
|
||||
virtual void Stop() = 0;
|
||||
|
||||
MediaStreamTrackInterface* track() { return track_; }
|
||||
uint32 ssrc() const { return ssrc_; }
|
||||
|
||||
protected:
|
||||
virtual void OnStateChanged() = 0;
|
||||
virtual void OnEnabledChanged() = 0;
|
||||
|
||||
private:
|
||||
rtc::scoped_refptr<MediaStreamTrackInterface> track_;
|
||||
uint32 ssrc_;
|
||||
MediaStreamTrackInterface::TrackState state_;
|
||||
bool enabled_;
|
||||
};
|
||||
|
||||
// LocalAudioSinkAdapter receives data callbacks as a sink to the local
|
||||
// AudioTrack, and passes the data to the sink of AudioRenderer.
|
||||
class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
|
||||
public cricket::AudioRenderer {
|
||||
public:
|
||||
LocalAudioSinkAdapter();
|
||||
virtual ~LocalAudioSinkAdapter();
|
||||
|
||||
private:
|
||||
// AudioSinkInterface implementation.
|
||||
void OnData(const void* audio_data,
|
||||
int bits_per_sample,
|
||||
int sample_rate,
|
||||
int number_of_channels,
|
||||
size_t number_of_frames) override;
|
||||
|
||||
// cricket::AudioRenderer implementation.
|
||||
void SetSink(cricket::AudioRenderer::Sink* sink) override;
|
||||
|
||||
cricket::AudioRenderer::Sink* sink_;
|
||||
// Critical section protecting |sink_|.
|
||||
rtc::CriticalSection lock_;
|
||||
};
|
||||
|
||||
// LocalAudioTrackHandler listens to events on a local AudioTrack instance
// connected to a PeerConnection and orders the |provider| to execute the
// requested change.
|
||||
class LocalAudioTrackHandler : public TrackHandler {
|
||||
public:
|
||||
LocalAudioTrackHandler(AudioTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
AudioProviderInterface* provider);
|
||||
virtual ~LocalAudioTrackHandler();
|
||||
|
||||
void Stop() override;
|
||||
|
||||
protected:
|
||||
void OnStateChanged() override;
|
||||
void OnEnabledChanged() override;
|
||||
|
||||
private:
|
||||
AudioTrackInterface* audio_track_;
|
||||
AudioProviderInterface* provider_;
|
||||
|
||||
// Used to pass the data callback from the |audio_track_| to the other
|
||||
// end of cricket::AudioRenderer.
|
||||
rtc::scoped_ptr<LocalAudioSinkAdapter> sink_adapter_;
|
||||
};
|
||||
|
||||
// RemoteAudioTrackHandler listens to events on a remote AudioTrack instance
// connected to a PeerConnection and orders the |provider| to execute the
// requested change.
|
||||
class RemoteAudioTrackHandler : public AudioSourceInterface::AudioObserver,
|
||||
public TrackHandler {
|
||||
public:
|
||||
RemoteAudioTrackHandler(AudioTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
AudioProviderInterface* provider);
|
||||
virtual ~RemoteAudioTrackHandler();
|
||||
void Stop() override;
|
||||
|
||||
protected:
|
||||
void OnStateChanged() override;
|
||||
void OnEnabledChanged() override;
|
||||
|
||||
private:
|
||||
// AudioSourceInterface::AudioObserver implementation.
|
||||
void OnSetVolume(double volume) override;
|
||||
|
||||
AudioTrackInterface* audio_track_;
|
||||
AudioProviderInterface* provider_;
|
||||
};
|
||||
|
||||
// LocalVideoTrackHandler listens to events on a local VideoTrack instance
// connected to a PeerConnection and orders the |provider| to execute the
// requested change.
|
||||
class LocalVideoTrackHandler : public TrackHandler {
|
||||
public:
|
||||
LocalVideoTrackHandler(VideoTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
VideoProviderInterface* provider);
|
||||
virtual ~LocalVideoTrackHandler();
|
||||
void Stop() override;
|
||||
|
||||
protected:
|
||||
void OnStateChanged() override;
|
||||
void OnEnabledChanged() override;
|
||||
|
||||
private:
|
||||
VideoTrackInterface* local_video_track_;
|
||||
VideoProviderInterface* provider_;
|
||||
};
|
||||
|
||||
// RemoteVideoTrackHandler listens to events on a remote VideoTrack instance
// connected to a PeerConnection and orders the |provider| to execute the
// requested changes.
|
||||
class RemoteVideoTrackHandler : public TrackHandler {
|
||||
public:
|
||||
RemoteVideoTrackHandler(VideoTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
VideoProviderInterface* provider);
|
||||
virtual ~RemoteVideoTrackHandler();
|
||||
void Stop() override;
|
||||
|
||||
protected:
|
||||
void OnStateChanged() override;
|
||||
void OnEnabledChanged() override;
|
||||
|
||||
private:
|
||||
VideoTrackInterface* remote_video_track_;
|
||||
VideoProviderInterface* provider_;
|
||||
};
|
||||
|
||||
class MediaStreamHandler : public ObserverInterface {
|
||||
public:
|
||||
MediaStreamHandler(MediaStreamInterface* stream,
|
||||
AudioProviderInterface* audio_provider,
|
||||
VideoProviderInterface* video_provider);
|
||||
~MediaStreamHandler();
|
||||
MediaStreamInterface* stream();
|
||||
void Stop();
|
||||
|
||||
virtual void AddAudioTrack(AudioTrackInterface* audio_track, uint32 ssrc) = 0;
|
||||
virtual void AddVideoTrack(VideoTrackInterface* video_track, uint32 ssrc) = 0;
|
||||
|
||||
virtual void RemoveTrack(MediaStreamTrackInterface* track);
|
||||
void OnChanged() override;
|
||||
|
||||
protected:
|
||||
TrackHandler* FindTrackHandler(MediaStreamTrackInterface* track);
|
||||
rtc::scoped_refptr<MediaStreamInterface> stream_;
|
||||
AudioProviderInterface* audio_provider_;
|
||||
VideoProviderInterface* video_provider_;
|
||||
typedef std::vector<TrackHandler*> TrackHandlers;
|
||||
TrackHandlers track_handlers_;
|
||||
};
|
||||
|
||||
class LocalMediaStreamHandler : public MediaStreamHandler {
|
||||
public:
|
||||
LocalMediaStreamHandler(MediaStreamInterface* stream,
|
||||
AudioProviderInterface* audio_provider,
|
||||
VideoProviderInterface* video_provider);
|
||||
~LocalMediaStreamHandler();
|
||||
|
||||
void AddAudioTrack(AudioTrackInterface* audio_track, uint32 ssrc) override;
|
||||
void AddVideoTrack(VideoTrackInterface* video_track, uint32 ssrc) override;
|
||||
};
|
||||
|
||||
class RemoteMediaStreamHandler : public MediaStreamHandler {
|
||||
public:
|
||||
RemoteMediaStreamHandler(MediaStreamInterface* stream,
|
||||
AudioProviderInterface* audio_provider,
|
||||
VideoProviderInterface* video_provider);
|
||||
~RemoteMediaStreamHandler();
|
||||
void AddAudioTrack(AudioTrackInterface* audio_track, uint32 ssrc) override;
|
||||
void AddVideoTrack(VideoTrackInterface* video_track, uint32 ssrc) override;
|
||||
};
|
||||
|
||||
// Container for MediaStreamHandlers of currently known local and remote
|
||||
// MediaStreams.
|
||||
class MediaStreamHandlerContainer {
|
||||
public:
|
||||
MediaStreamHandlerContainer(AudioProviderInterface* audio_provider,
|
||||
VideoProviderInterface* video_provider);
|
||||
~MediaStreamHandlerContainer();
|
||||
|
||||
// Notify all referenced objects that MediaStreamHandlerContainer will be
// destroyed. This method must be called prior to the dtor and before
// |audio_provider| and |video_provider| are destroyed.
|
||||
void TearDown();
|
||||
|
||||
// Remove all TrackHandlers for tracks in |stream| and make sure
// the audio_provider and video_provider are notified that the tracks have
// been removed.
|
||||
void RemoveRemoteStream(MediaStreamInterface* stream);
|
||||
|
||||
// Create a RemoteAudioTrackHandler and associate |audio_track| with |ssrc|.
|
||||
void AddRemoteAudioTrack(MediaStreamInterface* stream,
|
||||
AudioTrackInterface* audio_track,
|
||||
uint32 ssrc);
|
||||
// Create a RemoteVideoTrackHandler and associate |video_track| with |ssrc|.
|
||||
void AddRemoteVideoTrack(MediaStreamInterface* stream,
|
||||
VideoTrackInterface* video_track,
|
||||
uint32 ssrc);
|
||||
// Remove the TrackHandler for |track|.
|
||||
void RemoveRemoteTrack(MediaStreamInterface* stream,
|
||||
MediaStreamTrackInterface* track);
|
||||
|
||||
// Remove all TrackHandlers for tracks in |stream| and make sure
// the audio_provider and video_provider are notified that the tracks have
// been removed.
|
||||
void RemoveLocalStream(MediaStreamInterface* stream);
|
||||
|
||||
// Create a LocalAudioTrackHandler and associate |audio_track| with |ssrc|.
|
||||
void AddLocalAudioTrack(MediaStreamInterface* stream,
|
||||
AudioTrackInterface* audio_track,
|
||||
uint32 ssrc);
|
||||
// Create a LocalVideoTrackHandler and associate |video_track| with |ssrc|.
|
||||
void AddLocalVideoTrack(MediaStreamInterface* stream,
|
||||
VideoTrackInterface* video_track,
|
||||
uint32 ssrc);
|
||||
// Remove the TrackHandler for |track|.
|
||||
void RemoveLocalTrack(MediaStreamInterface* stream,
|
||||
MediaStreamTrackInterface* track);
|
||||
|
||||
private:
|
||||
typedef std::list<MediaStreamHandler*> StreamHandlerList;
|
||||
MediaStreamHandler* FindStreamHandler(const StreamHandlerList& handlers,
|
||||
MediaStreamInterface* stream);
|
||||
MediaStreamHandler* CreateRemoteStreamHandler(MediaStreamInterface* stream);
|
||||
MediaStreamHandler* CreateLocalStreamHandler(MediaStreamInterface* stream);
|
||||
void DeleteStreamHandler(StreamHandlerList* streamhandlers,
|
||||
MediaStreamInterface* stream);
|
||||
|
||||
StreamHandlerList local_streams_handlers_;
|
||||
StreamHandlerList remote_streams_handlers_;
|
||||
AudioProviderInterface* audio_provider_;
|
||||
VideoProviderInterface* video_provider_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // TALK_APP_WEBRTC_MEDIASTREAMHANDLER_H_
|
||||
// TODO(deadbeef): Remove this file once Chrome build files no longer reference
|
||||
// it.
|
||||
|
||||
@ -28,6 +28,8 @@
|
||||
#ifndef TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
|
||||
#define TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
|
||||
|
||||
#include "webrtc/base/basictypes.h"
|
||||
|
||||
namespace cricket {
|
||||
|
||||
class AudioRenderer;
|
||||
@ -40,6 +42,14 @@ struct VideoOptions;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// TODO(deadbeef): Change the key from an ssrc to a "sender_id" or
|
||||
// "receiver_id" string, which will be the MSID in the short term and MID in
|
||||
// the long term.
|
||||
|
||||
// TODO(deadbeef): These interfaces are effectively just a way for the
|
||||
// RtpSenders/Receivers to get to the BaseChannels. These interfaces should be
|
||||
// refactored away eventually, as the classes converge.
|
||||
|
||||
// This interface is called by AudioTrackHandler classes in mediastreamhandler.h
|
||||
// to change the settings of an audio track connected to a certain PeerConnection.
|
||||
class AudioProviderInterface {
|
||||
|
||||
@ -739,14 +739,14 @@ void MediaStreamSignaling::MaybeCreateDefaultStream() {
|
||||
kDefaultAudioTrackLabel, 0));
|
||||
|
||||
OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0,
|
||||
cricket::MEDIA_TYPE_AUDIO);
|
||||
cricket::MEDIA_TYPE_AUDIO);
|
||||
}
|
||||
if (remote_info_.default_video_track_needed &&
|
||||
default_remote_stream->GetVideoTracks().size() == 0) {
|
||||
remote_video_tracks_.push_back(TrackInfo(kDefaultStreamLabel,
|
||||
kDefaultVideoTrackLabel, 0));
|
||||
OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0,
|
||||
cricket::MEDIA_TYPE_VIDEO);
|
||||
cricket::MEDIA_TYPE_VIDEO);
|
||||
}
|
||||
if (default_created) {
|
||||
stream_observer_->OnAddRemoteStream(default_remote_stream);
|
||||
@ -807,17 +807,15 @@ void MediaStreamSignaling::UpdateLocalTracks(
|
||||
track_id);
|
||||
if (!track_info) {
|
||||
current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
|
||||
OnLocalTrackSeen(stream_label, track_id, it->first_ssrc(),
|
||||
media_type);
|
||||
OnLocalTrackSeen(stream_label, track_id, it->first_ssrc(), media_type);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void MediaStreamSignaling::OnLocalTrackSeen(
|
||||
const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
uint32 ssrc,
|
||||
cricket::MediaType media_type) {
|
||||
void MediaStreamSignaling::OnLocalTrackSeen(const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
uint32 ssrc,
|
||||
cricket::MediaType media_type) {
|
||||
MediaStreamInterface* stream = local_streams_->find(stream_label);
|
||||
if (!stream) {
|
||||
LOG(LS_WARNING) << "An unknown local MediaStream with label "
|
||||
|
||||
@ -287,10 +287,7 @@ class MediaStreamSignaling : public sigslot::has_slots<> {
|
||||
TrackInfo(const std::string& stream_label,
|
||||
const std::string track_id,
|
||||
uint32 ssrc)
|
||||
: stream_label(stream_label),
|
||||
track_id(track_id),
|
||||
ssrc(ssrc) {
|
||||
}
|
||||
: stream_label(stream_label), track_id(track_id), ssrc(ssrc) {}
|
||||
std::string stream_label;
|
||||
std::string track_id;
|
||||
uint32 ssrc;
|
||||
|
||||
@ -367,55 +367,53 @@ class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver {
|
||||
|
||||
size_t NumberOfRemoteAudioTracks() { return remote_audio_tracks_.size(); }
|
||||
|
||||
void VerifyRemoteAudioTrack(const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
uint32 ssrc) {
|
||||
void VerifyRemoteAudioTrack(const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
uint32 ssrc) {
|
||||
VerifyTrack(remote_audio_tracks_, stream_label, track_id, ssrc);
|
||||
}
|
||||
|
||||
size_t NumberOfRemoteVideoTracks() { return remote_video_tracks_.size(); }
|
||||
|
||||
void VerifyRemoteVideoTrack(const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
uint32 ssrc) {
|
||||
void VerifyRemoteVideoTrack(const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
uint32 ssrc) {
|
||||
VerifyTrack(remote_video_tracks_, stream_label, track_id, ssrc);
|
||||
}
|
||||
|
||||
size_t NumberOfLocalAudioTracks() { return local_audio_tracks_.size(); }
|
||||
void VerifyLocalAudioTrack(const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
uint32 ssrc) {
|
||||
void VerifyLocalAudioTrack(const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
uint32 ssrc) {
|
||||
VerifyTrack(local_audio_tracks_, stream_label, track_id, ssrc);
|
||||
}
|
||||
|
||||
size_t NumberOfLocalVideoTracks() { return local_video_tracks_.size(); }
|
||||
|
||||
void VerifyLocalVideoTrack(const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
uint32 ssrc) {
|
||||
void VerifyLocalVideoTrack(const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
uint32 ssrc) {
|
||||
VerifyTrack(local_video_tracks_, stream_label, track_id, ssrc);
|
||||
}
|
||||
|
||||
private:
|
||||
struct TrackInfo {
|
||||
TrackInfo() {}
|
||||
TrackInfo(const std::string& stream_label, const std::string track_id,
|
||||
TrackInfo(const std::string& stream_label,
|
||||
const std::string track_id,
|
||||
uint32 ssrc)
|
||||
: stream_label(stream_label),
|
||||
track_id(track_id),
|
||||
ssrc(ssrc) {
|
||||
}
|
||||
: stream_label(stream_label), track_id(track_id), ssrc(ssrc) {}
|
||||
std::string stream_label;
|
||||
std::string track_id;
|
||||
uint32 ssrc;
|
||||
};
|
||||
typedef std::vector<TrackInfo> TrackInfos;
|
||||
|
||||
void AddTrack(TrackInfos* track_infos, MediaStreamInterface* stream,
|
||||
void AddTrack(TrackInfos* track_infos,
|
||||
MediaStreamInterface* stream,
|
||||
MediaStreamTrackInterface* track,
|
||||
uint32 ssrc) {
|
||||
(*track_infos).push_back(TrackInfo(stream->label(), track->id(),
|
||||
ssrc));
|
||||
(*track_infos).push_back(TrackInfo(stream->label(), track->id(), ssrc));
|
||||
}
|
||||
|
||||
void RemoveTrack(TrackInfos* track_infos, MediaStreamInterface* stream,
|
||||
@ -441,7 +439,6 @@ class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
void VerifyTrack(const TrackInfos& track_infos,
|
||||
const std::string& stream_label,
|
||||
const std::string& track_id,
|
||||
|
||||
@ -33,7 +33,8 @@
|
||||
#include "talk/app/webrtc/jsepicecandidate.h"
|
||||
#include "talk/app/webrtc/jsepsessiondescription.h"
|
||||
#include "talk/app/webrtc/mediaconstraintsinterface.h"
|
||||
#include "talk/app/webrtc/mediastreamhandler.h"
|
||||
#include "talk/app/webrtc/rtpreceiver.h"
|
||||
#include "talk/app/webrtc/rtpsender.h"
|
||||
#include "talk/app/webrtc/streamcollection.h"
|
||||
#include "webrtc/p2p/client/basicportallocator.h"
|
||||
#include "talk/session/media/channelmanager.h"
|
||||
@ -339,10 +340,17 @@ PeerConnection::PeerConnection(PeerConnectionFactory* factory)
|
||||
|
||||
PeerConnection::~PeerConnection() {
|
||||
ASSERT(signaling_thread()->IsCurrent());
|
||||
if (mediastream_signaling_)
|
||||
if (mediastream_signaling_) {
|
||||
mediastream_signaling_->TearDown();
|
||||
if (stream_handler_container_)
|
||||
stream_handler_container_->TearDown();
|
||||
}
|
||||
// Need to detach RTP senders/receivers from WebRtcSession,
|
||||
// since it's about to be destroyed.
|
||||
for (const auto& sender : senders_) {
|
||||
sender->Stop();
|
||||
}
|
||||
for (const auto& receiver : receivers_) {
|
||||
receiver->Stop();
|
||||
}
|
||||
}
|
||||
|
||||
bool PeerConnection::Initialize(
|
||||
@ -398,8 +406,6 @@ bool PeerConnection::Initialize(
|
||||
factory_->worker_thread(),
|
||||
port_allocator_.get(),
|
||||
mediastream_signaling_.get()));
|
||||
stream_handler_container_.reset(new MediaStreamHandlerContainer(
|
||||
session_.get(), session_.get()));
|
||||
stats_.reset(new StatsCollector(session_.get()));
|
||||
|
||||
// Initialize the WebRtcSession. It creates transport channels etc.
|
||||
@ -424,6 +430,8 @@ PeerConnection::remote_streams() {
|
||||
return mediastream_signaling_->remote_streams();
|
||||
}
|
||||
|
||||
// TODO(deadbeef): Create RtpSenders immediately here, even if local
|
||||
// description hasn't yet been set.
|
||||
bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
|
||||
if (IsClosed()) {
|
||||
return false;
|
||||
@ -468,6 +476,25 @@ rtc::scoped_refptr<DtmfSenderInterface> PeerConnection::CreateDtmfSender(
|
||||
return DtmfSenderProxy::Create(signaling_thread(), sender.get());
|
||||
}
|
||||
|
||||
std::vector<rtc::scoped_refptr<RtpSenderInterface>> PeerConnection::GetSenders()
|
||||
const {
|
||||
std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders;
|
||||
for (const auto& sender : senders_) {
|
||||
senders.push_back(RtpSenderProxy::Create(signaling_thread(), sender.get()));
|
||||
}
|
||||
return senders;
|
||||
}
|
||||
|
||||
std::vector<rtc::scoped_refptr<RtpReceiverInterface>>
|
||||
PeerConnection::GetReceivers() const {
|
||||
std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers;
|
||||
for (const auto& receiver : receivers_) {
|
||||
receivers.push_back(
|
||||
RtpReceiverProxy::Create(signaling_thread(), receiver.get()));
|
||||
}
|
||||
return receivers;
|
||||
}
|
||||
|
||||
bool PeerConnection::GetStats(StatsObserver* observer,
|
||||
MediaStreamTrackInterface* track,
|
||||
StatsOutputLevel level) {
|
||||
@ -808,7 +835,6 @@ void PeerConnection::OnAddRemoteStream(MediaStreamInterface* stream) {
|
||||
}
|
||||
|
||||
void PeerConnection::OnRemoveRemoteStream(MediaStreamInterface* stream) {
|
||||
stream_handler_container_->RemoveRemoteStream(stream);
|
||||
observer_->OnRemoveStream(stream);
|
||||
}
|
||||
|
||||
@ -820,52 +846,87 @@ void PeerConnection::OnAddDataChannel(DataChannelInterface* data_channel) {
|
||||
void PeerConnection::OnAddRemoteAudioTrack(MediaStreamInterface* stream,
|
||||
AudioTrackInterface* audio_track,
|
||||
uint32 ssrc) {
|
||||
stream_handler_container_->AddRemoteAudioTrack(stream, audio_track, ssrc);
|
||||
receivers_.push_back(new AudioRtpReceiver(audio_track, ssrc, session_.get()));
|
||||
}
|
||||
|
||||
void PeerConnection::OnAddRemoteVideoTrack(MediaStreamInterface* stream,
|
||||
VideoTrackInterface* video_track,
|
||||
uint32 ssrc) {
|
||||
stream_handler_container_->AddRemoteVideoTrack(stream, video_track, ssrc);
|
||||
receivers_.push_back(new VideoRtpReceiver(video_track, ssrc, session_.get()));
|
||||
}
|
||||
|
||||
// TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote
|
||||
// description.
|
||||
void PeerConnection::OnRemoveRemoteAudioTrack(
|
||||
MediaStreamInterface* stream,
|
||||
AudioTrackInterface* audio_track) {
|
||||
stream_handler_container_->RemoveRemoteTrack(stream, audio_track);
|
||||
auto it = FindReceiverForTrack(audio_track);
|
||||
if (it == receivers_.end()) {
|
||||
LOG(LS_WARNING) << "RtpReceiver for track with id " << audio_track->id()
|
||||
<< " doesn't exist.";
|
||||
} else {
|
||||
(*it)->Stop();
|
||||
receivers_.erase(it);
|
||||
}
|
||||
}
|
||||
|
||||
void PeerConnection::OnRemoveRemoteVideoTrack(
|
||||
MediaStreamInterface* stream,
|
||||
VideoTrackInterface* video_track) {
|
||||
stream_handler_container_->RemoveRemoteTrack(stream, video_track);
|
||||
auto it = FindReceiverForTrack(video_track);
|
||||
if (it == receivers_.end()) {
|
||||
LOG(LS_WARNING) << "RtpReceiver for track with id " << video_track->id()
|
||||
<< " doesn't exist.";
|
||||
} else {
|
||||
(*it)->Stop();
|
||||
receivers_.erase(it);
|
||||
}
|
||||
}
|
||||
|
||||
void PeerConnection::OnAddLocalAudioTrack(MediaStreamInterface* stream,
|
||||
AudioTrackInterface* audio_track,
|
||||
uint32 ssrc) {
|
||||
stream_handler_container_->AddLocalAudioTrack(stream, audio_track, ssrc);
|
||||
senders_.push_back(new AudioRtpSender(audio_track, ssrc, session_.get()));
|
||||
stats_->AddLocalAudioTrack(audio_track, ssrc);
|
||||
}
|
||||
|
||||
void PeerConnection::OnAddLocalVideoTrack(MediaStreamInterface* stream,
|
||||
VideoTrackInterface* video_track,
|
||||
uint32 ssrc) {
|
||||
stream_handler_container_->AddLocalVideoTrack(stream, video_track, ssrc);
|
||||
senders_.push_back(new VideoRtpSender(video_track, ssrc, session_.get()));
|
||||
}
|
||||
|
||||
// TODO(deadbeef): Keep RtpSenders around even if track goes away in local
|
||||
// description.
|
||||
void PeerConnection::OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
|
||||
AudioTrackInterface* audio_track,
|
||||
uint32 ssrc) {
|
||||
stream_handler_container_->RemoveLocalTrack(stream, audio_track);
|
||||
auto it = FindSenderForTrack(audio_track);
|
||||
if (it == senders_.end()) {
|
||||
LOG(LS_WARNING) << "RtpSender for track with id " << audio_track->id()
|
||||
<< " doesn't exist.";
|
||||
return;
|
||||
} else {
|
||||
(*it)->Stop();
|
||||
senders_.erase(it);
|
||||
}
|
||||
stats_->RemoveLocalAudioTrack(audio_track, ssrc);
|
||||
}
|
||||
|
||||
void PeerConnection::OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
|
||||
VideoTrackInterface* video_track) {
|
||||
stream_handler_container_->RemoveLocalTrack(stream, video_track);
|
||||
auto it = FindSenderForTrack(video_track);
|
||||
if (it == senders_.end()) {
|
||||
LOG(LS_WARNING) << "RtpSender for track with id " << video_track->id()
|
||||
<< " doesn't exist.";
|
||||
return;
|
||||
} else {
|
||||
(*it)->Stop();
|
||||
senders_.erase(it);
|
||||
}
|
||||
}
|
||||
|
||||
void PeerConnection::OnRemoveLocalStream(MediaStreamInterface* stream) {
|
||||
stream_handler_container_->RemoveLocalStream(stream);
|
||||
}
|
||||
|
||||
void PeerConnection::OnIceConnectionChange(
|
||||
@ -920,4 +981,22 @@ void PeerConnection::ChangeSignalingState(
|
||||
observer_->OnStateChange(PeerConnectionObserver::kSignalingState);
|
||||
}
|
||||
|
||||
std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
|
||||
PeerConnection::FindSenderForTrack(MediaStreamTrackInterface* track) {
|
||||
return std::find_if(
|
||||
senders_.begin(), senders_.end(),
|
||||
[track](const rtc::scoped_refptr<RtpSenderInterface>& sender) {
|
||||
return sender->track() == track;
|
||||
});
|
||||
}
|
||||
|
||||
std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator
|
||||
PeerConnection::FindReceiverForTrack(MediaStreamTrackInterface* track) {
|
||||
return std::find_if(
|
||||
receivers_.begin(), receivers_.end(),
|
||||
[track](const rtc::scoped_refptr<RtpReceiverInterface>& receiver) {
|
||||
return receiver->track() == track;
|
||||
});
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
@ -34,20 +34,21 @@
|
||||
#include "talk/app/webrtc/mediastreamsignaling.h"
|
||||
#include "talk/app/webrtc/peerconnectionfactory.h"
|
||||
#include "talk/app/webrtc/peerconnectioninterface.h"
|
||||
#include "talk/app/webrtc/rtpreceiverinterface.h"
|
||||
#include "talk/app/webrtc/rtpsenderinterface.h"
|
||||
#include "talk/app/webrtc/statscollector.h"
|
||||
#include "talk/app/webrtc/streamcollection.h"
|
||||
#include "talk/app/webrtc/webrtcsession.h"
|
||||
#include "webrtc/base/scoped_ptr.h"
|
||||
|
||||
namespace webrtc {
|
||||
class MediaStreamHandlerContainer;
|
||||
|
||||
typedef std::vector<PortAllocatorFactoryInterface::StunConfiguration>
|
||||
StunConfigurations;
|
||||
typedef std::vector<PortAllocatorFactoryInterface::TurnConfiguration>
|
||||
TurnConfigurations;
|
||||
|
||||
// PeerConnectionImpl implements the PeerConnection interface.
|
||||
// PeerConnection implements the PeerConnectionInterface interface.
|
||||
// It uses MediaStreamSignaling and WebRtcSession to implement
|
||||
// the PeerConnection functionality.
|
||||
class PeerConnection : public PeerConnectionInterface,
|
||||
@ -72,6 +73,11 @@ class PeerConnection : public PeerConnectionInterface,
|
||||
virtual rtc::scoped_refptr<DtmfSenderInterface> CreateDtmfSender(
|
||||
AudioTrackInterface* track);
|
||||
|
||||
std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
|
||||
const override;
|
||||
std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
|
||||
const override;
|
||||
|
||||
virtual rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
|
||||
const std::string& label,
|
||||
const DataChannelInit* config);
|
||||
@ -168,6 +174,11 @@ class PeerConnection : public PeerConnectionInterface,
|
||||
return signaling_state_ == PeerConnectionInterface::kClosed;
|
||||
}
|
||||
|
||||
std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
|
||||
FindSenderForTrack(MediaStreamTrackInterface* track);
|
||||
std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator
|
||||
FindReceiverForTrack(MediaStreamTrackInterface* track);
|
||||
|
||||
// Storing the factory as a scoped reference pointer ensures that the memory
|
||||
// in the PeerConnectionFactoryImpl remains available as long as the
|
||||
// PeerConnection is running. It is passed to PeerConnection as a raw pointer.
|
||||
@ -186,8 +197,10 @@ class PeerConnection : public PeerConnectionInterface,
|
||||
rtc::scoped_ptr<cricket::PortAllocator> port_allocator_;
|
||||
rtc::scoped_ptr<WebRtcSession> session_;
|
||||
rtc::scoped_ptr<MediaStreamSignaling> mediastream_signaling_;
|
||||
rtc::scoped_ptr<MediaStreamHandlerContainer> stream_handler_container_;
|
||||
rtc::scoped_ptr<StatsCollector> stats_;
|
||||
|
||||
std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders_;
|
||||
std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
@ -77,6 +77,8 @@
|
||||
#include "talk/app/webrtc/dtlsidentitystore.h"
|
||||
#include "talk/app/webrtc/jsep.h"
|
||||
#include "talk/app/webrtc/mediastreaminterface.h"
|
||||
#include "talk/app/webrtc/rtpreceiverinterface.h"
|
||||
#include "talk/app/webrtc/rtpsenderinterface.h"
|
||||
#include "talk/app/webrtc/statstypes.h"
|
||||
#include "talk/app/webrtc/umametrics.h"
|
||||
#include "webrtc/base/fileutils.h"
|
||||
@ -329,6 +331,17 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
|
||||
virtual rtc::scoped_refptr<DtmfSenderInterface> CreateDtmfSender(
|
||||
AudioTrackInterface* track) = 0;
|
||||
|
||||
// TODO(deadbeef): Make these pure virtual once all subclasses implement them.
|
||||
virtual std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
|
||||
const {
|
||||
return std::vector<rtc::scoped_refptr<RtpSenderInterface>>();
|
||||
}
|
||||
|
||||
virtual std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
|
||||
const {
|
||||
return std::vector<rtc::scoped_refptr<RtpReceiverInterface>>();
|
||||
}
|
||||
|
||||
virtual bool GetStats(StatsObserver* observer,
|
||||
MediaStreamTrackInterface* track,
|
||||
StatsOutputLevel level) = 0;
|
||||
|
||||
@ -43,6 +43,10 @@ BEGIN_PROXY_MAP(PeerConnection)
|
||||
PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*)
|
||||
PROXY_METHOD1(rtc::scoped_refptr<DtmfSenderInterface>,
|
||||
CreateDtmfSender, AudioTrackInterface*)
|
||||
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpSenderInterface>>,
|
||||
GetSenders)
|
||||
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>,
|
||||
GetReceivers)
|
||||
PROXY_METHOD3(bool, GetStats, StatsObserver*,
|
||||
MediaStreamTrackInterface*,
|
||||
StatsOutputLevel)
|
||||
|
||||
@ -27,4 +27,80 @@
|
||||
|
||||
#include "talk/app/webrtc/rtpreceiver.h"
|
||||
|
||||
// This file is currently stubbed so that Chromium's build files can be updated.
|
||||
#include "talk/app/webrtc/videosourceinterface.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
AudioRtpReceiver::AudioRtpReceiver(AudioTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
AudioProviderInterface* provider)
|
||||
: id_(track->id()),
|
||||
track_(track),
|
||||
ssrc_(ssrc),
|
||||
provider_(provider),
|
||||
cached_track_enabled_(track->enabled()) {
|
||||
track_->RegisterObserver(this);
|
||||
track_->GetSource()->RegisterAudioObserver(this);
|
||||
Reconfigure();
|
||||
}
|
||||
|
||||
AudioRtpReceiver::~AudioRtpReceiver() {
|
||||
track_->GetSource()->UnregisterAudioObserver(this);
|
||||
track_->UnregisterObserver(this);
|
||||
Stop();
|
||||
}
|
||||
|
||||
void AudioRtpReceiver::OnChanged() {
|
||||
if (cached_track_enabled_ != track_->enabled()) {
|
||||
cached_track_enabled_ = track_->enabled();
|
||||
Reconfigure();
|
||||
}
|
||||
}
|
||||
|
||||
void AudioRtpReceiver::OnSetVolume(double volume) {
|
||||
// When the track is disabled, the volume of the source, which is the
|
||||
// corresponding WebRtc Voice Engine channel will be 0. So we do not allow
|
||||
// setting the volume to the source when the track is disabled.
|
||||
if (provider_ && track_->enabled())
|
||||
provider_->SetAudioPlayoutVolume(ssrc_, volume);
|
||||
}
|
||||
|
||||
void AudioRtpReceiver::Stop() {
|
||||
// TODO(deadbeef): Need to do more here to fully stop receiving packets.
|
||||
if (!provider_) {
|
||||
return;
|
||||
}
|
||||
provider_->SetAudioPlayout(ssrc_, false, nullptr);
|
||||
provider_ = nullptr;
|
||||
}
|
||||
|
||||
void AudioRtpReceiver::Reconfigure() {
|
||||
if (!provider_) {
|
||||
return;
|
||||
}
|
||||
provider_->SetAudioPlayout(ssrc_, track_->enabled(), track_->GetRenderer());
|
||||
}
|
||||
|
||||
VideoRtpReceiver::VideoRtpReceiver(VideoTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
VideoProviderInterface* provider)
|
||||
: id_(track->id()), track_(track), ssrc_(ssrc), provider_(provider) {
|
||||
provider_->SetVideoPlayout(ssrc_, true, track_->GetSource()->FrameInput());
|
||||
}
|
||||
|
||||
VideoRtpReceiver::~VideoRtpReceiver() {
|
||||
// Since cricket::VideoRenderer is not reference counted,
|
||||
// we need to remove it from the provider before we are deleted.
|
||||
Stop();
|
||||
}
|
||||
|
||||
void VideoRtpReceiver::Stop() {
|
||||
// TODO(deadbeef): Need to do more here to fully stop receiving packets.
|
||||
if (!provider_) {
|
||||
return;
|
||||
}
|
||||
provider_->SetVideoPlayout(ssrc_, false, nullptr);
|
||||
provider_ = nullptr;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
@ -25,4 +25,80 @@
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
// This file is currently stubbed so that Chromium's build files can be updated.
|
||||
// This file contains classes that implement RtpReceiverInterface.
|
||||
// An RtpReceiver associates a MediaStreamTrackInterface with an underlying
|
||||
// transport (provided by AudioProviderInterface/VideoProviderInterface)
|
||||
|
||||
#ifndef TALK_APP_WEBRTC_RTPRECEIVER_H_
|
||||
#define TALK_APP_WEBRTC_RTPRECEIVER_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "talk/app/webrtc/mediastreamprovider.h"
|
||||
#include "talk/app/webrtc/rtpreceiverinterface.h"
|
||||
#include "webrtc/base/basictypes.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class AudioRtpReceiver : public ObserverInterface,
|
||||
public AudioSourceInterface::AudioObserver,
|
||||
public rtc::RefCountedObject<RtpReceiverInterface> {
|
||||
public:
|
||||
AudioRtpReceiver(AudioTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
AudioProviderInterface* provider);
|
||||
|
||||
virtual ~AudioRtpReceiver();
|
||||
|
||||
// ObserverInterface implementation
|
||||
void OnChanged() override;
|
||||
|
||||
// AudioSourceInterface::AudioObserver implementation
|
||||
void OnSetVolume(double volume) override;
|
||||
|
||||
// RtpReceiverInterface implementation
|
||||
rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
|
||||
return track_.get();
|
||||
}
|
||||
|
||||
std::string id() const override { return id_; }
|
||||
|
||||
void Stop() override;
|
||||
|
||||
private:
|
||||
void Reconfigure();
|
||||
|
||||
std::string id_;
|
||||
rtc::scoped_refptr<AudioTrackInterface> track_;
|
||||
uint32 ssrc_;
|
||||
AudioProviderInterface* provider_;
|
||||
bool cached_track_enabled_;
|
||||
};
|
||||
|
||||
class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInterface> {
|
||||
public:
|
||||
VideoRtpReceiver(VideoTrackInterface* track,
|
||||
uint32 ssrc,
|
||||
VideoProviderInterface* provider);
|
||||
|
||||
virtual ~VideoRtpReceiver();
|
||||
|
||||
// RtpReceiverInterface implementation
|
||||
rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
|
||||
return track_.get();
|
||||
}
|
||||
|
||||
std::string id() const override { return id_; }
|
||||
|
||||
void Stop() override;
|
||||
|
||||
private:
|
||||
std::string id_;
|
||||
rtc::scoped_refptr<VideoTrackInterface> track_;
|
||||
uint32 ssrc_;
|
||||
VideoProviderInterface* provider_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // TALK_APP_WEBRTC_RTPRECEIVER_H_
|
||||
|
||||
@ -25,4 +25,42 @@
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
// This file is currently stubbed so that Chromium's build files can be updated.
|
||||
// This file contains interfaces for RtpReceivers
|
||||
// http://w3c.github.io/webrtc-pc/#rtcrtpreceiver-interface
|
||||
|
||||
#ifndef TALK_APP_WEBRTC_RTPRECEIVERINTERFACE_H_
|
||||
#define TALK_APP_WEBRTC_RTPRECEIVERINTERFACE_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "talk/app/webrtc/proxy.h"
|
||||
#include "talk/app/webrtc/mediastreaminterface.h"
|
||||
#include "webrtc/base/refcount.h"
|
||||
#include "webrtc/base/scoped_ref_ptr.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class RtpReceiverInterface : public rtc::RefCountInterface {
|
||||
public:
|
||||
virtual rtc::scoped_refptr<MediaStreamTrackInterface> track() const = 0;
|
||||
|
||||
// Not to be confused with "mid", this is a field we can temporarily use
|
||||
// to uniquely identify a receiver until we implement Unified Plan SDP.
|
||||
virtual std::string id() const = 0;
|
||||
|
||||
virtual void Stop() = 0;
|
||||
|
||||
protected:
|
||||
virtual ~RtpReceiverInterface() {}
|
||||
};
|
||||
|
||||
// Define proxy for RtpReceiverInterface.
|
||||
BEGIN_PROXY_MAP(RtpReceiver)
|
||||
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
|
||||
PROXY_CONSTMETHOD0(std::string, id)
|
||||
PROXY_METHOD0(void, Stop)
|
||||
END_PROXY()
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // TALK_APP_WEBRTC_RTPRECEIVERINTERFACE_H_
|
||||
|
||||
@ -27,4 +27,181 @@
|
||||
|
||||
#include "talk/app/webrtc/rtpsender.h"
|
||||
|
||||
// This file is currently stubbed so that Chromium's build files can be updated.

#include "talk/app/webrtc/localaudiosource.h"
#include "talk/app/webrtc/videosourceinterface.h"

namespace webrtc {

LocalAudioSinkAdapter::LocalAudioSinkAdapter() : sink_(nullptr) {}

LocalAudioSinkAdapter::~LocalAudioSinkAdapter() {
  rtc::CritScope lock(&lock_);
  if (sink_)
    sink_->OnClose();
}

void LocalAudioSinkAdapter::OnData(const void* audio_data,
                                   int bits_per_sample,
                                   int sample_rate,
                                   int number_of_channels,
                                   size_t number_of_frames) {
  rtc::CritScope lock(&lock_);
  if (sink_) {
    sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
                  number_of_frames);
  }
}

void LocalAudioSinkAdapter::SetSink(cricket::AudioRenderer::Sink* sink) {
  rtc::CritScope lock(&lock_);
  ASSERT(!sink || !sink_);
  sink_ = sink;
}

AudioRtpSender::AudioRtpSender(AudioTrackInterface* track,
                               uint32 ssrc,
                               AudioProviderInterface* provider)
    : id_(track->id()),
      track_(track),
      ssrc_(ssrc),
      provider_(provider),
      cached_track_enabled_(track->enabled()),
      sink_adapter_(new LocalAudioSinkAdapter()) {
  track_->RegisterObserver(this);
  track_->AddSink(sink_adapter_.get());
  Reconfigure();
}

AudioRtpSender::~AudioRtpSender() {
  track_->RemoveSink(sink_adapter_.get());
  track_->UnregisterObserver(this);
  Stop();
}

void AudioRtpSender::OnChanged() {
  if (cached_track_enabled_ != track_->enabled()) {
    cached_track_enabled_ = track_->enabled();
    Reconfigure();
  }
}

bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
  if (track->kind() != "audio") {
    LOG(LS_ERROR) << "SetTrack called on audio RtpSender with " << track->kind()
                  << " track.";
    return false;
  }
  AudioTrackInterface* audio_track = static_cast<AudioTrackInterface*>(track);

  // Detach from old track.
  track_->RemoveSink(sink_adapter_.get());
  track_->UnregisterObserver(this);

  // Attach to new track.
  track_ = audio_track;
  cached_track_enabled_ = track_->enabled();
  track_->RegisterObserver(this);
  track_->AddSink(sink_adapter_.get());
  Reconfigure();
  return true;
}

void AudioRtpSender::Stop() {
  // TODO(deadbeef): Need to do more here to fully stop sending packets.
  if (!provider_) {
    return;
  }
  cricket::AudioOptions options;
  provider_->SetAudioSend(ssrc_, false, options, nullptr);
  provider_ = nullptr;
}

void AudioRtpSender::Reconfigure() {
  if (!provider_) {
    return;
  }
  cricket::AudioOptions options;
  if (track_->enabled() && track_->GetSource()) {
    // TODO(xians): Remove this static_cast since we should be able to connect
    // a remote audio track to peer connection.
    options = static_cast<LocalAudioSource*>(track_->GetSource())->options();
  }

  // Use the renderer if the audio track has one, otherwise use the sink
  // adapter owned by this class.
  cricket::AudioRenderer* renderer =
      track_->GetRenderer() ? track_->GetRenderer() : sink_adapter_.get();
  ASSERT(renderer != nullptr);
  provider_->SetAudioSend(ssrc_, track_->enabled(), options, renderer);
}

VideoRtpSender::VideoRtpSender(VideoTrackInterface* track,
                               uint32 ssrc,
                               VideoProviderInterface* provider)
    : id_(track->id()),
      track_(track),
      ssrc_(ssrc),
      provider_(provider),
      cached_track_enabled_(track->enabled()) {
  track_->RegisterObserver(this);
  VideoSourceInterface* source = track_->GetSource();
  if (source) {
    provider_->SetCaptureDevice(ssrc_, source->GetVideoCapturer());
  }
  Reconfigure();
}

VideoRtpSender::~VideoRtpSender() {
  track_->UnregisterObserver(this);
  Stop();
}

void VideoRtpSender::OnChanged() {
  if (cached_track_enabled_ != track_->enabled()) {
    cached_track_enabled_ = track_->enabled();
    Reconfigure();
  }
}

bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
  if (track->kind() != "video") {
    LOG(LS_ERROR) << "SetTrack called on video RtpSender with " << track->kind()
                  << " track.";
    return false;
  }
  VideoTrackInterface* video_track = static_cast<VideoTrackInterface*>(track);

  // Detach from old track.
  track_->UnregisterObserver(this);

  // Attach to new track.
  track_ = video_track;
  cached_track_enabled_ = track_->enabled();
  track_->RegisterObserver(this);
  Reconfigure();
  return true;
}

void VideoRtpSender::Stop() {
  // TODO(deadbeef): Need to do more here to fully stop sending packets.
  if (!provider_) {
    return;
  }
  provider_->SetCaptureDevice(ssrc_, nullptr);
  provider_->SetVideoSend(ssrc_, false, nullptr);
  provider_ = nullptr;
}

void VideoRtpSender::Reconfigure() {
  if (!provider_) {
    return;
  }
  const cricket::VideoOptions* options = nullptr;
  VideoSourceInterface* source = track_->GetSource();
  if (track_->enabled() && source) {
    options = source->options();
  }
  provider_->SetVideoSend(ssrc_, track_->enabled(), options);
}

}  // namespace webrtc

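The sender lifecycle above is easiest to see end to end. The sketch below is illustrative only and is not part of this CL: it assumes a caller that already owns an audio track and an AudioProviderInterface (in the real code PeerConnection/WebRtcSession play those roles), and it uses a made-up SSRC value.

// Illustrative sketch, not part of the CL. Shows how the AudioRtpSender
// defined above reacts to track state changes and to Stop().
#include "talk/app/webrtc/rtpsender.h"

void SketchAudioSenderLifecycle(webrtc::AudioTrackInterface* audio_track,
                                webrtc::AudioProviderInterface* provider) {
  // Constructing the sender registers it as an observer of |audio_track|,
  // installs the LocalAudioSinkAdapter as the track's sink, and immediately
  // calls provider->SetAudioSend(ssrc, enabled, options, renderer).
  rtc::scoped_refptr<webrtc::AudioRtpSender> sender(
      new webrtc::AudioRtpSender(audio_track, /* made-up ssrc */ 99, provider));

  // Disabling the track fires OnChanged() -> Reconfigure(), which calls
  // SetAudioSend(ssrc, false, ...) on the provider; re-enabling reverses it.
  audio_track->set_enabled(false);
  audio_track->set_enabled(true);

  // Stop() sends a final SetAudioSend(ssrc, false, ...) and clears the
  // provider pointer, so later Reconfigure() calls become no-ops.
  sender->Stop();
}
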
@ -25,4 +25,116 @@
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

// This file is currently stubbed so that Chromium's build files can be updated.
// This file contains classes that implement RtpSenderInterface.
// An RtpSender associates a MediaStreamTrackInterface with an underlying
// transport (provided by AudioProviderInterface/VideoProviderInterface)

#ifndef TALK_APP_WEBRTC_RTPSENDER_H_
#define TALK_APP_WEBRTC_RTPSENDER_H_

#include <string>

#include "talk/app/webrtc/mediastreamprovider.h"
#include "talk/app/webrtc/rtpsenderinterface.h"
#include "talk/media/base/audiorenderer.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ptr.h"

namespace webrtc {

// LocalAudioSinkAdapter receives data callback as a sink to the local
// AudioTrack, and passes the data to the sink of AudioRenderer.
class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
                              public cricket::AudioRenderer {
 public:
  LocalAudioSinkAdapter();
  virtual ~LocalAudioSinkAdapter();

 private:
  // AudioSinkInterface implementation.
  void OnData(const void* audio_data,
              int bits_per_sample,
              int sample_rate,
              int number_of_channels,
              size_t number_of_frames) override;

  // cricket::AudioRenderer implementation.
  void SetSink(cricket::AudioRenderer::Sink* sink) override;

  cricket::AudioRenderer::Sink* sink_;
  // Critical section protecting |sink_|.
  rtc::CriticalSection lock_;
};

class AudioRtpSender : public ObserverInterface,
                       public rtc::RefCountedObject<RtpSenderInterface> {
 public:
  AudioRtpSender(AudioTrackInterface* track,
                 uint32 ssrc,
                 AudioProviderInterface* provider);

  virtual ~AudioRtpSender();

  // ObserverInterface implementation
  void OnChanged() override;

  // RtpSenderInterface implementation
  bool SetTrack(MediaStreamTrackInterface* track) override;
  rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
    return track_.get();
  }

  std::string id() const override { return id_; }

  void Stop() override;

 private:
  void Reconfigure();

  std::string id_;
  rtc::scoped_refptr<AudioTrackInterface> track_;
  uint32 ssrc_;
  AudioProviderInterface* provider_;
  bool cached_track_enabled_;

  // Used to pass the data callback from the |track_| to the other end of
  // cricket::AudioRenderer.
  rtc::scoped_ptr<LocalAudioSinkAdapter> sink_adapter_;
};

class VideoRtpSender : public ObserverInterface,
                       public rtc::RefCountedObject<RtpSenderInterface> {
 public:
  VideoRtpSender(VideoTrackInterface* track,
                 uint32 ssrc,
                 VideoProviderInterface* provider);

  virtual ~VideoRtpSender();

  // ObserverInterface implementation
  void OnChanged() override;

  // RtpSenderInterface implementation
  bool SetTrack(MediaStreamTrackInterface* track) override;
  rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
    return track_.get();
  }

  std::string id() const override { return id_; }

  void Stop() override;

 private:
  void Reconfigure();

  std::string id_;
  rtc::scoped_refptr<VideoTrackInterface> track_;
  uint32 ssrc_;
  VideoProviderInterface* provider_;
  bool cached_track_enabled_;
};

}  // namespace webrtc

#endif  // TALK_APP_WEBRTC_RTPSENDER_H_

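For readers new to the adapter declared above: the local AudioTrack pushes PCM into it through AudioTrackSinkInterface::OnData(), and the send side attaches its own sink through cricket::AudioRenderer::SetSink(); the adapter simply forwards one to the other under |lock_|. A minimal sketch follows; it is illustrative only, and CountingSink is a hypothetical sink introduced just for the example.

// Illustrative sketch, not part of the CL. Both overrides in
// LocalAudioSinkAdapter are declared private, so callers go through the base
// interfaces, exactly as AudioTrack and the send stream do.
#include <stdint.h>

#include "talk/app/webrtc/rtpsender.h"

// Hypothetical sink that just counts the frames forwarded by the adapter.
class CountingSink : public cricket::AudioRenderer::Sink {
 public:
  void OnData(const void* audio_data,
              int bits_per_sample,
              int sample_rate,
              int number_of_channels,
              size_t number_of_frames) override {
    frames_ += number_of_frames;
  }
  void OnClose() override {}
  size_t frames() const { return frames_; }

 private:
  size_t frames_ = 0;
};

void SketchAdapterDataPath(webrtc::LocalAudioSinkAdapter* adapter) {
  CountingSink counting_sink;
  // Send-stream side: install a sink via the AudioRenderer interface.
  static_cast<cricket::AudioRenderer*>(adapter)->SetSink(&counting_sink);
  // Track side: push one (silent) 10 ms frame of 48 kHz mono 16-bit audio.
  int16_t samples[480] = {0};
  static_cast<webrtc::AudioTrackSinkInterface*>(adapter)->OnData(
      samples, 16, 48000, 1, 480);
  // Detach before |counting_sink| goes out of scope.
  static_cast<cricket::AudioRenderer*>(adapter)->SetSink(nullptr);
}
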
@ -25,4 +25,46 @@
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

// This file is currently stubbed so that Chromium's build files can be updated.
// This file contains interfaces for RtpSenders
// http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface

#ifndef TALK_APP_WEBRTC_RTPSENDERINTERFACE_H_
#define TALK_APP_WEBRTC_RTPSENDERINTERFACE_H_

#include <string>

#include "talk/app/webrtc/proxy.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"

namespace webrtc {

class RtpSenderInterface : public rtc::RefCountInterface {
 public:
  // Returns true if successful in setting the track.
  // Fails if an audio track is set on a video RtpSender, or vice-versa.
  virtual bool SetTrack(MediaStreamTrackInterface* track) = 0;
  virtual rtc::scoped_refptr<MediaStreamTrackInterface> track() const = 0;

  // Not to be confused with "mid", this is a field we can temporarily use
  // to uniquely identify a receiver until we implement Unified Plan SDP.
  virtual std::string id() const = 0;

  virtual void Stop() = 0;

 protected:
  virtual ~RtpSenderInterface() {}
};

// Define proxy for RtpSenderInterface.
BEGIN_PROXY_MAP(RtpSender)
PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*)
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
PROXY_CONSTMETHOD0(std::string, id)
PROXY_METHOD0(void, Stop)
END_PROXY()

}  // namespace webrtc

#endif  // TALK_APP_WEBRTC_RTPSENDERINTERFACE_H_

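The BEGIN_PROXY_MAP/END_PROXY block above generates an RtpSenderProxy class whose calls are marshalled to the signaling thread, following the pattern proxy.h already uses for the other PeerConnection interfaces. A hedged sketch of how an owner might hand a thread-safe sender to the application; the RtpSenderProxy::Create() signature is assumed from those existing proxy macros and is not spelled out in this CL.

// Illustrative sketch, not part of the CL. Assumes the proxy macros in
// talk/app/webrtc/proxy.h generate RtpSenderProxy::Create(signaling_thread,
// sender), as they do for the other proxied interfaces.
#include "talk/app/webrtc/rtpsender.h"
#include "talk/app/webrtc/rtpsenderinterface.h"
#include "webrtc/base/thread.h"

rtc::scoped_refptr<webrtc::RtpSenderInterface> SketchWrapSenderInProxy(
    rtc::Thread* signaling_thread,
    webrtc::AudioTrackInterface* track,
    webrtc::AudioProviderInterface* provider) {
  rtc::scoped_refptr<webrtc::AudioRtpSender> sender(
      new webrtc::AudioRtpSender(track, /* made-up ssrc */ 1, provider));
  // Every call made through the returned proxy (SetTrack, track, id, Stop)
  // is posted to |signaling_thread| before it reaches |sender|.
  return webrtc::RtpSenderProxy::Create(signaling_thread, sender.get());
}
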
@ -25,13 +25,13 @@
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "talk/app/webrtc/mediastreamhandler.h"

#include <string>

#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/remoteaudiosource.h"
#include "talk/app/webrtc/rtpreceiver.h"
#include "talk/app/webrtc/rtpsender.h"
#include "talk/app/webrtc/streamcollection.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videotrack.h"
@ -52,29 +52,36 @@ static const uint32 kAudioSsrc = 99;

namespace webrtc {

// Helper class to test MediaStreamHandler.
// Helper class to test RtpSender/RtpReceiver.
class MockAudioProvider : public AudioProviderInterface {
 public:
  virtual ~MockAudioProvider() {}
  MOCK_METHOD3(SetAudioPlayout, void(uint32 ssrc, bool enable,
                                     cricket::AudioRenderer* renderer));
  MOCK_METHOD4(SetAudioSend, void(uint32 ssrc, bool enable,
                                  const cricket::AudioOptions& options,
                                  cricket::AudioRenderer* renderer));
  MOCK_METHOD3(SetAudioPlayout,
               void(uint32 ssrc,
                    bool enable,
                    cricket::AudioRenderer* renderer));
  MOCK_METHOD4(SetAudioSend,
               void(uint32 ssrc,
                    bool enable,
                    const cricket::AudioOptions& options,
                    cricket::AudioRenderer* renderer));
  MOCK_METHOD2(SetAudioPlayoutVolume, void(uint32 ssrc, double volume));
};

// Helper class to test MediaStreamHandler.
// Helper class to test RtpSender/RtpReceiver.
class MockVideoProvider : public VideoProviderInterface {
 public:
  virtual ~MockVideoProvider() {}
  MOCK_METHOD2(SetCaptureDevice, bool(uint32 ssrc,
                                      cricket::VideoCapturer* camera));
  MOCK_METHOD3(SetVideoPlayout, void(uint32 ssrc,
                                     bool enable,
                                     cricket::VideoRenderer* renderer));
  MOCK_METHOD3(SetVideoSend, void(uint32 ssrc, bool enable,
                                  const cricket::VideoOptions* options));
  MOCK_METHOD2(SetCaptureDevice,
               bool(uint32 ssrc, cricket::VideoCapturer* camera));
  MOCK_METHOD3(SetVideoPlayout,
               void(uint32 ssrc,
                    bool enable,
                    cricket::VideoRenderer* renderer));
  MOCK_METHOD3(SetVideoSend,
               void(uint32 ssrc,
                    bool enable,
                    const cricket::VideoOptions* options));
};

class FakeVideoSource : public Notifier<VideoSourceInterface> {
@ -82,9 +89,7 @@ class FakeVideoSource : public Notifier<VideoSourceInterface> {
  static rtc::scoped_refptr<FakeVideoSource> Create() {
    return new rtc::RefCountedObject<FakeVideoSource>();
  }
  virtual cricket::VideoCapturer* GetVideoCapturer() {
    return &fake_capturer_;
  }
  virtual cricket::VideoCapturer* GetVideoCapturer() { return &fake_capturer_; }
  virtual void Stop() {}
  virtual void Restart() {}
  virtual void AddSink(cricket::VideoRenderer* output) {}
@ -103,154 +108,113 @@ class FakeVideoSource : public Notifier<VideoSourceInterface> {
  cricket::VideoOptions options_;
};

class MediaStreamHandlerTest : public testing::Test {
class RtpSenderReceiverTest : public testing::Test {
 public:
  MediaStreamHandlerTest()
      : handlers_(&audio_provider_, &video_provider_) {
  }

  virtual void SetUp() {
    stream_ = MediaStream::Create(kStreamLabel1);
    rtc::scoped_refptr<VideoSourceInterface> source(
        FakeVideoSource::Create());
    rtc::scoped_refptr<VideoSourceInterface> source(FakeVideoSource::Create());
    video_track_ = VideoTrack::Create(kVideoTrackId, source);
    EXPECT_TRUE(stream_->AddTrack(video_track_));
  }

  void AddLocalAudioTrack() {
  void CreateAudioRtpSender() {
    audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
    EXPECT_TRUE(stream_->AddTrack(audio_track_));
    EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
    handlers_.AddLocalAudioTrack(stream_, stream_->GetAudioTracks()[0],
                                 kAudioSsrc);
    audio_rtp_sender_ = new AudioRtpSender(stream_->GetAudioTracks()[0],
                                           kAudioSsrc, &audio_provider_);
  }

  void AddLocalVideoTrack() {
    EXPECT_CALL(video_provider_, SetCaptureDevice(
        kVideoSsrc, video_track_->GetSource()->GetVideoCapturer()));
  void CreateVideoRtpSender() {
    EXPECT_CALL(video_provider_,
                SetCaptureDevice(
                    kVideoSsrc, video_track_->GetSource()->GetVideoCapturer()));
    EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
    handlers_.AddLocalVideoTrack(stream_, stream_->GetVideoTracks()[0],
                                 kVideoSsrc);
    video_rtp_sender_ = new VideoRtpSender(stream_->GetVideoTracks()[0],
                                           kVideoSsrc, &video_provider_);
  }

  void RemoveLocalAudioTrack() {
  void DestroyAudioRtpSender() {
    EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _))
        .Times(1);
    handlers_.RemoveLocalTrack(stream_, audio_track_);
    audio_rtp_sender_ = nullptr;
  }

  void RemoveLocalVideoTrack() {
    EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, NULL))
        .Times(1);
    EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _))
        .Times(1);
    handlers_.RemoveLocalTrack(stream_, video_track_);
  void DestroyVideoRtpSender() {
    EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, NULL)).Times(1);
    EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
    video_rtp_sender_ = nullptr;
  }

  void AddRemoteAudioTrack() {
    audio_track_ = AudioTrack::Create(kAudioTrackId,
                                      RemoteAudioSource::Create().get());
  void CreateAudioRtpReceiver() {
    audio_track_ =
        AudioTrack::Create(kAudioTrackId, RemoteAudioSource::Create().get());
    EXPECT_TRUE(stream_->AddTrack(audio_track_));
    EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true, _));
    handlers_.AddRemoteAudioTrack(stream_, stream_->GetAudioTracks()[0],
                                  kAudioSsrc);
    audio_rtp_receiver_ = new AudioRtpReceiver(stream_->GetAudioTracks()[0],
                                               kAudioSsrc, &audio_provider_);
  }

  void AddRemoteVideoTrack() {
    EXPECT_CALL(video_provider_, SetVideoPlayout(
        kVideoSsrc, true, video_track_->GetSource()->FrameInput()));
    handlers_.AddRemoteVideoTrack(stream_, stream_->GetVideoTracks()[0],
                                  kVideoSsrc);
  void CreateVideoRtpReceiver() {
    EXPECT_CALL(video_provider_,
                SetVideoPlayout(kVideoSsrc, true,
                                video_track_->GetSource()->FrameInput()));
    video_rtp_receiver_ = new VideoRtpReceiver(stream_->GetVideoTracks()[0],
                                               kVideoSsrc, &video_provider_);
  }

  void RemoveRemoteAudioTrack() {
  void DestroyAudioRtpReceiver() {
    EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false, _));
    handlers_.RemoveRemoteTrack(stream_, stream_->GetAudioTracks()[0]);
    audio_rtp_receiver_ = nullptr;
  }

  void RemoveRemoteVideoTrack() {
  void DestroyVideoRtpReceiver() {
    EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, false, NULL));
    handlers_.RemoveRemoteTrack(stream_, stream_->GetVideoTracks()[0]);
    video_rtp_receiver_ = nullptr;
  }

 protected:
  MockAudioProvider audio_provider_;
  MockVideoProvider video_provider_;
  MediaStreamHandlerContainer handlers_;
  rtc::scoped_refptr<AudioRtpSender> audio_rtp_sender_;
  rtc::scoped_refptr<VideoRtpSender> video_rtp_sender_;
  rtc::scoped_refptr<AudioRtpReceiver> audio_rtp_receiver_;
  rtc::scoped_refptr<VideoRtpReceiver> video_rtp_receiver_;
  rtc::scoped_refptr<MediaStreamInterface> stream_;
  rtc::scoped_refptr<VideoTrackInterface> video_track_;
  rtc::scoped_refptr<AudioTrackInterface> audio_track_;
};

// Test that |audio_provider_| is notified when an audio track is associated
// and disassociated with a MediaStreamHandler.
TEST_F(MediaStreamHandlerTest, AddAndRemoveLocalAudioTrack) {
  AddLocalAudioTrack();
  RemoveLocalAudioTrack();

  handlers_.RemoveLocalStream(stream_);
// and disassociated with an AudioRtpSender.
TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpSender) {
  CreateAudioRtpSender();
  DestroyAudioRtpSender();
}

// Test that |video_provider_| is notified when a video track is associated and
// disassociated with a MediaStreamHandler.
TEST_F(MediaStreamHandlerTest, AddAndRemoveLocalVideoTrack) {
  AddLocalVideoTrack();
  RemoveLocalVideoTrack();

  handlers_.RemoveLocalStream(stream_);
// disassociated with a VideoRtpSender.
TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpSender) {
  CreateVideoRtpSender();
  DestroyVideoRtpSender();
}

// Test that |video_provider_| and |audio_provider_| is notified when an audio
// and video track is disassociated with a MediaStreamHandler by calling
// RemoveLocalStream.
TEST_F(MediaStreamHandlerTest, RemoveLocalStream) {
  AddLocalAudioTrack();
  AddLocalVideoTrack();

  EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, NULL))
      .Times(1);
  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _))
      .Times(1);
  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _))
      .Times(1);
  handlers_.RemoveLocalStream(stream_);
}

// Test that |audio_provider_| is notified when a remote audio and track is
// associated and disassociated with a MediaStreamHandler.
TEST_F(MediaStreamHandlerTest, AddAndRemoveRemoteAudioTrack) {
  AddRemoteAudioTrack();
  RemoveRemoteAudioTrack();

  handlers_.RemoveRemoteStream(stream_);
// associated and disassociated with an AudioRtpReceiver.
TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpReceiver) {
  CreateAudioRtpReceiver();
  DestroyAudioRtpReceiver();
}

// Test that |video_provider_| is notified when a remote
// video track is associated and disassociated with a MediaStreamHandler.
TEST_F(MediaStreamHandlerTest, AddAndRemoveRemoteVideoTrack) {
  AddRemoteVideoTrack();
  RemoveRemoteVideoTrack();

  handlers_.RemoveRemoteStream(stream_);
// video track is associated and disassociated with a VideoRtpReceiver.
TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpReceiver) {
  CreateVideoRtpReceiver();
  DestroyVideoRtpReceiver();
}

// Test that |audio_provider_| and |video_provider_| is notified when an audio
// and video track is disassociated with a MediaStreamHandler by calling
// RemoveRemoveStream.
TEST_F(MediaStreamHandlerTest, RemoveRemoteStream) {
  AddRemoteAudioTrack();
  AddRemoteVideoTrack();

  EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, false, NULL))
      .Times(1);
  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false, _))
      .Times(1);
  handlers_.RemoveRemoteStream(stream_);
}

TEST_F(MediaStreamHandlerTest, LocalAudioTrackDisable) {
  AddLocalAudioTrack();
TEST_F(RtpSenderReceiverTest, LocalAudioTrackDisable) {
  CreateAudioRtpSender();

  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _));
  audio_track_->set_enabled(false);
@ -258,12 +222,11 @@ TEST_F(MediaStreamHandlerTest, LocalAudioTrackDisable) {
  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
  audio_track_->set_enabled(true);

  RemoveLocalAudioTrack();
  handlers_.TearDown();
  DestroyAudioRtpSender();
}

TEST_F(MediaStreamHandlerTest, RemoteAudioTrackDisable) {
  AddRemoteAudioTrack();
TEST_F(RtpSenderReceiverTest, RemoteAudioTrackDisable) {
  CreateAudioRtpReceiver();

  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false, _));
  audio_track_->set_enabled(false);
@ -271,12 +234,11 @@ TEST_F(MediaStreamHandlerTest, RemoteAudioTrackDisable) {
  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true, _));
  audio_track_->set_enabled(true);

  RemoveRemoteAudioTrack();
  handlers_.TearDown();
  DestroyAudioRtpReceiver();
}

TEST_F(MediaStreamHandlerTest, LocalVideoTrackDisable) {
  AddLocalVideoTrack();
TEST_F(RtpSenderReceiverTest, LocalVideoTrackDisable) {
  CreateVideoRtpSender();

  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _));
  video_track_->set_enabled(false);
@ -284,23 +246,21 @@ TEST_F(MediaStreamHandlerTest, LocalVideoTrackDisable) {
  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
  video_track_->set_enabled(true);

  RemoveLocalVideoTrack();
  handlers_.TearDown();
  DestroyVideoRtpSender();
}

TEST_F(MediaStreamHandlerTest, RemoteVideoTrackDisable) {
  AddRemoteVideoTrack();
TEST_F(RtpSenderReceiverTest, RemoteVideoTrackDisable) {
  CreateVideoRtpReceiver();

  video_track_->set_enabled(false);

  video_track_->set_enabled(true);

  RemoveRemoteVideoTrack();
  handlers_.TearDown();
  DestroyVideoRtpReceiver();
}

TEST_F(MediaStreamHandlerTest, RemoteAudioTrackSetVolume) {
  AddRemoteAudioTrack();
TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) {
  CreateAudioRtpReceiver();

  double volume = 0.5;
  EXPECT_CALL(audio_provider_, SetAudioPlayoutVolume(kAudioSsrc, volume));
@ -318,8 +278,7 @@ TEST_F(MediaStreamHandlerTest, RemoteAudioTrackSetVolume) {
  EXPECT_CALL(audio_provider_, SetAudioPlayoutVolume(kAudioSsrc, new_volume));
  audio_track_->GetSource()->SetVolume(new_volume);

  RemoveRemoteAudioTrack();
  handlers_.TearDown();
  DestroyAudioRtpReceiver();
}

}  // namespace webrtc

@ -85,51 +85,34 @@ class FakeMediaStreamSignaling : public webrtc::MediaStreamSignaling,
  }

  // Implements MediaStreamSignalingObserver.
  virtual void OnAddRemoteStream(webrtc::MediaStreamInterface* stream) {
  }
  virtual void OnRemoveRemoteStream(webrtc::MediaStreamInterface* stream) {
  }
  virtual void OnAddDataChannel(webrtc::DataChannelInterface* data_channel) {
  }
  virtual void OnAddRemoteStream(webrtc::MediaStreamInterface* stream) {}
  virtual void OnRemoveRemoteStream(webrtc::MediaStreamInterface* stream) {}
  virtual void OnAddDataChannel(webrtc::DataChannelInterface* data_channel) {}
  virtual void OnAddLocalAudioTrack(webrtc::MediaStreamInterface* stream,
                                    webrtc::AudioTrackInterface* audio_track,
                                    uint32 ssrc) {
  }
                                    uint32 ssrc) {}
  virtual void OnAddLocalVideoTrack(webrtc::MediaStreamInterface* stream,
                                    webrtc::VideoTrackInterface* video_track,
                                    uint32 ssrc) {
  }
                                    uint32 ssrc) {}
  virtual void OnAddRemoteAudioTrack(webrtc::MediaStreamInterface* stream,
                                     webrtc::AudioTrackInterface* audio_track,
                                     uint32 ssrc) {
  }

                                     uint32 ssrc) {}
  virtual void OnAddRemoteVideoTrack(webrtc::MediaStreamInterface* stream,
                                     webrtc::VideoTrackInterface* video_track,
                                     uint32 ssrc) {
  }

                                     uint32 ssrc) {}
  virtual void OnRemoveRemoteAudioTrack(
      webrtc::MediaStreamInterface* stream,
      webrtc::AudioTrackInterface* audio_track) {
  }

      webrtc::AudioTrackInterface* audio_track) {}
  virtual void OnRemoveRemoteVideoTrack(
      webrtc::MediaStreamInterface* stream,
      webrtc::VideoTrackInterface* video_track) {
  }

  virtual void OnRemoveLocalAudioTrack(
      webrtc::MediaStreamInterface* stream,
      webrtc::AudioTrackInterface* audio_track,
      uint32 ssrc) {
  }
      webrtc::VideoTrackInterface* video_track) {}
  virtual void OnRemoveLocalAudioTrack(webrtc::MediaStreamInterface* stream,
                                       webrtc::AudioTrackInterface* audio_track,
                                       uint32 ssrc) {}
  virtual void OnRemoveLocalVideoTrack(
      webrtc::MediaStreamInterface* stream,
      webrtc::VideoTrackInterface* video_track) {
  }
  virtual void OnRemoveLocalStream(webrtc::MediaStreamInterface* stream) {
  }
      webrtc::VideoTrackInterface* video_track) {}
  virtual void OnRemoveLocalStream(webrtc::MediaStreamInterface* stream) {}

 private:
  rtc::scoped_refptr<webrtc::MediaStreamInterface> CreateStream(

@ -733,8 +733,6 @@
        'app/webrtc/mediacontroller.h',
        'app/webrtc/mediastream.cc',
        'app/webrtc/mediastream.h',
        'app/webrtc/mediastreamhandler.cc',
        'app/webrtc/mediastreamhandler.h',
        'app/webrtc/mediastreaminterface.h',
        'app/webrtc/mediastreamprovider.h',
        'app/webrtc/mediastreamproxy.h',
@ -757,6 +755,12 @@
        'app/webrtc/remoteaudiosource.h',
        'app/webrtc/remotevideocapturer.cc',
        'app/webrtc/remotevideocapturer.h',
        'app/webrtc/rtpreceiver.cc',
        'app/webrtc/rtpreceiver.h',
        'app/webrtc/rtpreceiverinterface.h',
        'app/webrtc/rtpsender.cc',
        'app/webrtc/rtpsender.h',
        'app/webrtc/rtpsenderinterface.h',
        'app/webrtc/sctputils.cc',
        'app/webrtc/sctputils.h',
        'app/webrtc/statscollector.cc',

@ -200,7 +200,6 @@
        'app/webrtc/jsepsessiondescription_unittest.cc',
        'app/webrtc/localaudiosource_unittest.cc',
        'app/webrtc/mediastream_unittest.cc',
        'app/webrtc/mediastreamhandler_unittest.cc',
        'app/webrtc/mediastreamsignaling_unittest.cc',
        'app/webrtc/peerconnection_unittest.cc',
        'app/webrtc/peerconnectionendtoend_unittest.cc',
@ -208,6 +207,7 @@
        'app/webrtc/peerconnectioninterface_unittest.cc',
        # 'app/webrtc/peerconnectionproxy_unittest.cc',
        'app/webrtc/remotevideocapturer_unittest.cc',
        'app/webrtc/rtpsenderreceiver_unittest.cc',
        'app/webrtc/sctputils.cc',
        'app/webrtc/statscollector_unittest.cc',
        'app/webrtc/test/fakeaudiocapturemodule.cc',