(Auto)update libjingle 73927775 -> 74032598

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6965 4adac7df-926f-26a2-2b94-8c16560cd09d
buildbot@webrtc.org
2014-08-25 12:11:58 +00:00
parent 926707b167
commit b4c7b09c13
13 changed files with 386 additions and 39 deletions

View File

@ -95,6 +95,8 @@ const char MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate[] =
"googSuspendBelowMinBitrate";
const char MediaConstraintsInterface::kNumUnsignalledRecvStreams[] =
"googNumUnsignalledRecvStreams";
const char MediaConstraintsInterface::kCombinedAudioVideoBwe[] =
"googCombinedAudioVideoBwe";
const char MediaConstraintsInterface::kScreencastMinBitrate[] =
"googScreencastMinBitrate";
// TODO(ronghuawu): Remove once cpu overuse detection is stable.

View File

@ -117,6 +117,8 @@ class MediaConstraintsInterface {
// googSuspendBelowMinBitrate
static const char kNumUnsignalledRecvStreams[];
// googNumUnsignalledRecvStreams
// Constraint to enable combined audio+video bandwidth estimation.
static const char kCombinedAudioVideoBwe[]; // googCombinedAudioVideoBwe
static const char kScreencastMinBitrate[]; // googScreencastMinBitrate
static const char kCpuOveruseDetection[]; // googCpuOveruseDetection
static const char kCpuUnderuseThreshold[]; // googCpuUnderuseThreshold
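The new constraint is consumed when the PeerConnection is created. A minimal sketch of opting in, using the FakeConstraints test helper that the updated tests below also rely on (the surrounding factory setup is assumed):

#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/app/webrtc/test/fakeconstraints.h"

// Build constraints that opt a new PeerConnection into combined audio+video
// BWE. AddOptional() is used rather than AddMandatory() so an engine without
// support ignores the key instead of failing negotiation.
webrtc::FakeConstraints MakeCombinedBweConstraints() {
  webrtc::FakeConstraints constraints;
  constraints.AddOptional(
      webrtc::MediaConstraintsInterface::kCombinedAudioVideoBwe, true);
  return constraints;
}

The result is then handed to PeerConnectionFactoryInterface::CreatePeerConnection(), which is effectively what CreateTestClients(&setup_constraints, &setup_constraints) does in the tests below.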

View File

@ -90,6 +90,8 @@ static const int kMaxWaitMs = 2000;
// warnings.
#if !defined(THREAD_SANITIZER)
static const int kMaxWaitForStatsMs = 3000;
static const int kMaxWaitForAudioDataMs = 10000;
static const int kMaxWaitForRembMs = 5000;
#endif
static const int kMaxWaitForFramesMs = 10000;
static const int kEndAudioFrameCount = 3;
@ -154,11 +156,11 @@ class PeerConnectionTestClientBase
}
void AddMediaStream(bool audio, bool video) {
std::string label = kStreamLabelBase +
std::string stream_label = kStreamLabelBase +
rtc::ToString<int>(
static_cast<int>(peer_connection_->local_streams()->count()));
rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
peer_connection_factory_->CreateLocalMediaStream(label);
peer_connection_factory_->CreateLocalMediaStream(stream_label);
if (audio && can_receive_audio()) {
FakeConstraints constraints;
@ -169,13 +171,13 @@ class PeerConnectionTestClientBase
peer_connection_factory_->CreateAudioSource(&constraints);
// TODO(perkj): Test audio source when it is implemented. Currently audio
// always uses the default input.
std::string label = stream_label + kAudioTrackLabelBase;
rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
peer_connection_factory_->CreateAudioTrack(kAudioTrackLabelBase,
source));
peer_connection_factory_->CreateAudioTrack(label, source));
stream->AddTrack(audio_track);
}
if (video && can_receive_video()) {
stream->AddTrack(CreateLocalVideoTrack(label));
stream->AddTrack(CreateLocalVideoTrack(stream_label));
}
EXPECT_TRUE(peer_connection_->AddStream(stream, NULL));
@ -368,6 +370,17 @@ class PeerConnectionTestClientBase
return observer->BytesSent();
}
int GetAvailableReceivedBandwidthStats() {
rtc::scoped_refptr<MockStatsObserver>
observer(new rtc::RefCountedObject<MockStatsObserver>());
EXPECT_TRUE(peer_connection_->GetStats(
observer, NULL, PeerConnectionInterface::kStatsOutputLevelStandard));
EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
int bw = observer->AvailableReceiveBandwidth();
LOG(INFO) << "Available Receive Bandwidth: " << bw;
return bw;
}
int rendered_width() {
EXPECT_FALSE(fake_video_renderers_.empty());
return fake_video_renderers_.empty() ? 1 :
@ -447,6 +460,12 @@ class PeerConnectionTestClientBase
webrtc::PeerConnectionInterface* pc() {
return peer_connection_.get();
}
void StopVideoCapturers() {
for (std::vector<cricket::VideoCapturer*>::iterator it =
video_capturers_.begin(); it != video_capturers_.end(); ++it) {
(*it)->Stop();
}
}
protected:
explicit PeerConnectionTestClientBase(const std::string& id)
@ -529,10 +548,12 @@ class PeerConnectionTestClientBase
FakeConstraints source_constraints = video_constraints_;
source_constraints.SetMandatoryMaxFrameRate(10);
cricket::FakeVideoCapturer* fake_capturer =
new webrtc::FakePeriodicVideoCapturer();
video_capturers_.push_back(fake_capturer);
rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
peer_connection_factory_->CreateVideoSource(
new webrtc::FakePeriodicVideoCapturer(),
&source_constraints);
fake_capturer, &source_constraints);
std::string label = stream_label + kVideoTrackLabelBase;
return peer_connection_factory_->CreateVideoTrack(label, source);
}
@ -569,6 +590,10 @@ class PeerConnectionTestClientBase
// For remote peer communication.
MessageReceiver* signaling_message_receiver_;
// Store references to the video capturers we've created, so that we can
// stop them if required.
std::vector<cricket::VideoCapturer*> video_capturers_;
};
class JsepTestClient
@ -1314,6 +1339,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, RegisterDataChannelObserver) {
// Unregister the existing observer.
receiving_client()->data_channel()->UnregisterObserver();
std::string data = "hello world";
SendRtpData(initializing_client()->data_channel(), data);
@ -1437,4 +1463,80 @@ TEST_F(JsepPeerConnectionP2PTestClient,
EnableVideoDecoderFactory();
LocalP2PTest();
}
// Test receive bandwidth stats with only audio enabled at receiver.
TEST_F(JsepPeerConnectionP2PTestClient, ReceivedBweStatsAudio) {
ASSERT_TRUE(CreateTestClients());
receiving_client()->SetReceiveAudioVideo(true, false);
LocalP2PTest();
// Wait until we have received some audio data.
StreamCollectionInterface* local_streams =
initializing_client()->local_streams();
ASSERT_GT(local_streams->count(), 0u);
ASSERT_GT(local_streams->at(0)->GetAudioTracks().size(), 0u);
MediaStreamTrackInterface* local_audio_track =
local_streams->at(0)->GetAudioTracks()[0];
EXPECT_TRUE_WAIT(
receiving_client()->GetBytesReceivedStats(local_audio_track) > 10000,
kMaxWaitForAudioDataMs);
// Then verify that the REMB-based receive bandwidth estimate stays at
// zero, since only audio is received and combined BWE is not enabled.
EXPECT_EQ_WAIT(
receiving_client()->GetAvailableReceivedBandwidthStats(), 0,
kMaxWaitForRembMs);
}
// Test receive bandwidth stats with combined BWE.
TEST_F(JsepPeerConnectionP2PTestClient, ReceivedBweStatsCombined) {
FakeConstraints setup_constraints;
setup_constraints.AddOptional(
MediaConstraintsInterface::kCombinedAudioVideoBwe, true);
ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
initializing_client()->AddMediaStream(true, true);
initializing_client()->AddMediaStream(false, true);
initializing_client()->AddMediaStream(false, true);
initializing_client()->AddMediaStream(false, true);
LocalP2PTest();
// Run until a non-zero bw (above 40 kbps) is reported.
EXPECT_TRUE_WAIT(
receiving_client()->GetAvailableReceivedBandwidthStats() > 40000,
kMaxWaitForRembMs);
int video_bw = receiving_client()->GetAvailableReceivedBandwidthStats();
// Halt the video capturers, then run until a new non-zero bw that is
// lower than the previous value is reported.
initializing_client()->StopVideoCapturers();
EXPECT_TRUE_WAIT(
receiving_client()->GetAvailableReceivedBandwidthStats() < video_bw,
kMaxWaitForRembMs);
EXPECT_GT(receiving_client()->GetAvailableReceivedBandwidthStats(), 0);
}
// Test receive bandwidth stats with 1 audio and 4 video tracks, but no
// combined BWE.
TEST_F(JsepPeerConnectionP2PTestClient, ReceivedBweStatsNotCombined) {
FakeConstraints setup_constraints;
setup_constraints.AddOptional(
MediaConstraintsInterface::kCombinedAudioVideoBwe, false);
ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
initializing_client()->AddMediaStream(true, true);
initializing_client()->AddMediaStream(false, true);
initializing_client()->AddMediaStream(false, true);
initializing_client()->AddMediaStream(false, true);
LocalP2PTest();
// Run until a non-zero bw (above 40 kbps) is reported.
EXPECT_TRUE_WAIT(
receiving_client()->GetAvailableReceivedBandwidthStats() > 40000,
kMaxWaitForRembMs);
// Halt the video capturers, then run until the reported bw drops to zero.
initializing_client()->StopVideoCapturers();
EXPECT_EQ_WAIT(
receiving_client()->GetAvailableReceivedBandwidthStats(), 0,
kMaxWaitForRembMs);
}
#endif // if !defined(THREAD_SANITIZER)

View File

@ -133,31 +133,37 @@ class MockStatsObserver : public webrtc::StatsObserver {
size_t number_of_reports() const { return reports_.size(); }
int AudioOutputLevel() {
return GetSsrcStatsValue(
webrtc::StatsReport::kStatsValueNameAudioOutputLevel);
return GetStatsValue(StatsReport::kStatsReportTypeSsrc,
StatsReport::kStatsValueNameAudioOutputLevel);
}
int AudioInputLevel() {
return GetSsrcStatsValue(
webrtc::StatsReport::kStatsValueNameAudioInputLevel);
return GetStatsValue(StatsReport::kStatsReportTypeSsrc,
StatsReport::kStatsValueNameAudioInputLevel);
}
int BytesReceived() {
return GetSsrcStatsValue(
webrtc::StatsReport::kStatsValueNameBytesReceived);
return GetStatsValue(StatsReport::kStatsReportTypeSsrc,
StatsReport::kStatsValueNameBytesReceived);
}
int BytesSent() {
return GetSsrcStatsValue(webrtc::StatsReport::kStatsValueNameBytesSent);
return GetStatsValue(StatsReport::kStatsReportTypeSsrc,
StatsReport::kStatsValueNameBytesSent);
}
int AvailableReceiveBandwidth() {
return GetStatsValue(StatsReport::kStatsReportTypeBwe,
StatsReport::kStatsValueNameAvailableReceiveBandwidth);
}
private:
int GetSsrcStatsValue(StatsReport::StatsValueName name) {
int GetStatsValue(const std::string& type, StatsReport::StatsValueName name) {
if (reports_.empty()) {
return 0;
}
for (size_t i = 0; i < reports_.size(); ++i) {
if (reports_[i].type != StatsReport::kStatsReportTypeSsrc)
if (reports_[i].type != type)
continue;
webrtc::StatsReport::Values::const_iterator it =
reports_[i].values.begin();
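The hunk is cut off above. For orientation, a sketch of how the lookup plausibly completes once a report of the requested type is found; the value iteration and the rtc::FromString conversion are assumptions based on the surrounding helper, not part of this diff:

// Scan the matching report's values for the requested StatsValueName and
// convert the stored string to int; fall through to return 0 if no report
// of the right type carries the value.
for (; it != reports_[i].values.end(); ++it) {
  if (it->name == name) {
    return rtc::FromString<int>(it->value);
  }
}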

View File

@ -471,14 +471,16 @@ WebRtcSession::WebRtcSession(
}
WebRtcSession::~WebRtcSession() {
if (voice_channel_.get()) {
SignalVoiceChannelDestroyed();
channel_manager_->DestroyVoiceChannel(voice_channel_.release());
}
// Destroy video_channel_ first since it may have a pointer to the
// voice_channel_.
if (video_channel_.get()) {
SignalVideoChannelDestroyed();
channel_manager_->DestroyVideoChannel(video_channel_.release());
}
if (voice_channel_.get()) {
SignalVoiceChannelDestroyed();
channel_manager_->DestroyVoiceChannel(voice_channel_.release());
}
if (data_channel_.get()) {
SignalDataChannelDestroyed();
channel_manager_->DestroyDataChannel(data_channel_.release());
@ -610,6 +612,10 @@ bool WebRtcSession::Initialize(
cricket::VideoOptions::HIGH);
}
SetOptionFromOptionalConstraint(constraints,
MediaConstraintsInterface::kCombinedAudioVideoBwe,
&audio_options_.combined_audio_video_bwe);
const cricket::VideoCodec default_codec(
JsepSessionDescription::kDefaultVideoCodecId,
JsepSessionDescription::kDefaultVideoCodecName,
@ -1425,16 +1431,8 @@ bool WebRtcSession::UseCandidate(
void WebRtcSession::RemoveUnusedChannelsAndTransports(
const SessionDescription* desc) {
const cricket::ContentInfo* voice_info =
cricket::GetFirstAudioContent(desc);
if ((!voice_info || voice_info->rejected) && voice_channel_) {
mediastream_signaling_->OnAudioChannelClose();
SignalVoiceChannelDestroyed();
const std::string content_name = voice_channel_->content_name();
channel_manager_->DestroyVoiceChannel(voice_channel_.release());
DestroyTransportProxy(content_name);
}
// Destroy video_channel_ first since it may have a pointer to the
// voice_channel_.
const cricket::ContentInfo* video_info =
cricket::GetFirstVideoContent(desc);
if ((!video_info || video_info->rejected) && video_channel_) {
@ -1445,6 +1443,16 @@ void WebRtcSession::RemoveUnusedChannelsAndTransports(
DestroyTransportProxy(content_name);
}
const cricket::ContentInfo* voice_info =
cricket::GetFirstAudioContent(desc);
if ((!voice_info || voice_info->rejected) && voice_channel_) {
mediastream_signaling_->OnAudioChannelClose();
SignalVoiceChannelDestroyed();
const std::string content_name = voice_channel_->content_name();
channel_manager_->DestroyVoiceChannel(voice_channel_.release());
DestroyTransportProxy(content_name);
}
const cricket::ContentInfo* data_info =
cricket::GetFirstDataContent(desc);
if ((!data_info || data_info->rejected) && data_channel_) {
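The constraint-to-option plumbing added to Initialize() above goes through the existing SetOptionFromOptionalConstraint() helper. A sketch of what that boils down to for the new key; FindConstraint() is the real lookup declared in mediaconstraintsinterface.h, but the exact body here is an assumption:

bool value = false;
if (webrtc::FindConstraint(
        constraints, MediaConstraintsInterface::kCombinedAudioVideoBwe,
        &value, NULL)) {
  // Copy the parsed constraint into the voice channel's AudioOptions; the
  // Settable<bool> stays unset when the constraint is absent.
  audio_options_.combined_audio_video_bwe.Set(value);
}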

View File

@ -3311,6 +3311,26 @@ TEST_F(WebRtcSessionTest, TestNumUnsignalledRecvStreamsConstraint) {
SetAndVerifyNumUnsignalledRecvStreams(-1, 0);
}
TEST_F(WebRtcSessionTest, TestCombinedAudioVideoBweConstraint) {
constraints_.reset(new FakeConstraints());
constraints_->AddOptional(
webrtc::MediaConstraintsInterface::kCombinedAudioVideoBwe,
true);
Init(NULL);
mediastream_signaling_.SendAudioVideoStream1();
SessionDescriptionInterface* offer = CreateOffer();
SetLocalDescriptionWithoutError(offer);
voice_channel_ = media_engine_->GetVoiceChannel(0);
ASSERT_TRUE(voice_channel_ != NULL);
cricket::AudioOptions audio_options;
EXPECT_TRUE(voice_channel_->GetOptions(&audio_options));
EXPECT_TRUE(
audio_options.combined_audio_video_bwe.GetWithDefaultIfUnset(false));
}
// Tests that we can renegotiate new media content with ICE candidates in the
// new remote SDP.
TEST_F(WebRtcSessionTest, TestRenegotiateNewMediaWithCandidatesInSdp) {

View File

@ -182,6 +182,7 @@ struct AudioOptions {
recording_sample_rate.SetFrom(change.recording_sample_rate);
playout_sample_rate.SetFrom(change.playout_sample_rate);
dscp.SetFrom(change.dscp);
combined_audio_video_bwe.SetFrom(change.combined_audio_video_bwe);
}
bool operator==(const AudioOptions& o) const {
@ -207,7 +208,8 @@ struct AudioOptions {
rx_agc_limiter == o.rx_agc_limiter &&
recording_sample_rate == o.recording_sample_rate &&
playout_sample_rate == o.playout_sample_rate &&
dscp == o.dscp;
dscp == o.dscp &&
combined_audio_video_bwe == o.combined_audio_video_bwe;
}
std::string ToString() const {
@ -238,6 +240,7 @@ struct AudioOptions {
ost << ToStringIfSet("recording_sample_rate", recording_sample_rate);
ost << ToStringIfSet("playout_sample_rate", playout_sample_rate);
ost << ToStringIfSet("dscp", dscp);
ost << ToStringIfSet("combined_audio_video_bwe", combined_audio_video_bwe);
ost << "}";
return ost.str();
}
@ -275,6 +278,8 @@ struct AudioOptions {
Settable<uint32> playout_sample_rate;
// Set DSCP value for packet sent from audio channel.
Settable<bool> dscp;
// Enable combined audio+video bandwidth estimation.
Settable<bool> combined_audio_video_bwe;
};
// Options that can be applied to a VideoMediaChannel or a VideoMediaEngine.
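A short sketch of how the new Settable<bool> travels through AudioOptions; Set(), SetFrom(), and GetWithDefaultIfUnset() appear elsewhere in this diff, while the SetAll() name for the merge helper that the SetFrom() lines above live in is an assumption about this tree:

#include "talk/media/base/mediachannel.h"

void SketchCombinedBweOption() {
  cricket::AudioOptions change;
  change.combined_audio_video_bwe.Set(true);  // explicitly set on the delta

  cricket::AudioOptions current;              // option starts out unset
  current.SetAll(change);                     // SetFrom() copies only set values

  // Readers must supply a default for the unset case:
  bool enabled =
      current.combined_audio_video_bwe.GetWithDefaultIfUnset(false);
  // enabled is now true; it would be false had the option never been set.
}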

View File

@ -335,6 +335,7 @@ class FakeWebRtcVideoEngine
unsigned int send_nack_bitrate_;
unsigned int send_bandwidth_;
unsigned int receive_bandwidth_;
// Bandwidth to deduct from estimated uplink capacity.
unsigned int reserved_transmit_bitrate_bps_;
bool suspend_below_min_bitrate_;
webrtc::CpuOveruseObserver* overuse_observer_;

View File

@ -43,10 +43,7 @@
#ifdef USE_WEBRTC_DEV_BRANCH
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#endif
namespace webrtc {
class ViENetwork;
}
#include "webrtc/video_engine/include/vie_network.h"
namespace cricket {
@ -316,6 +313,8 @@ class FakeWebRtcVoiceEngine
}
webrtc::ViENetwork* GetViENetwork(int channel) {
WEBRTC_ASSERT_CHANNEL(channel);
// WARNING: This pointer is for verification purposes only. Calling
// functions on it may result in undefined behavior!
return channels_[channel]->vie_network;
}
int GetVideoChannel(int channel) {
@ -999,6 +998,11 @@ class FakeWebRtcVoiceEngine
WEBRTC_CHECK_CHANNEL(channel);
channels_[channel]->vie_network = vie_network;
channels_[channel]->video_channel = video_channel;
if (vie_network) {
// The interface is released here to avoid leaks. A test should not
// attempt to call functions on the interface stored in the channel.
vie_network->Release();
}
return 0;
}
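The Release() call above mirrors the real engine's sub-API convention, which the shipped code below also follows: GetInterface() takes a reference that the caller must eventually drop. A condensed sketch (error handling omitted):

#include "webrtc/video_engine/include/vie_network.h"

void SketchViENetworkRefCounting(webrtc::VideoEngine* vie) {
  // GetInterface() bumps the sub-API's reference count.
  webrtc::ViENetwork* vie_network = webrtc::ViENetwork::GetInterface(vie);
  if (vie_network != NULL) {
    // ... hand (vie_network, channel) to whoever needs it ...
    vie_network->Release();  // drop our reference; never call through it later
  }
}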

View File

@ -1572,7 +1572,19 @@ WebRtcVideoMediaChannel::WebRtcVideoMediaChannel(
bool WebRtcVideoMediaChannel::Init() {
const uint32 ssrc_key = 0;
return CreateChannel(ssrc_key, MD_SENDRECV, &default_channel_id_);
bool result = CreateChannel(ssrc_key, MD_SENDRECV, &default_channel_id_);
if (!result) {
return false;
}
if (voice_channel_) {
WebRtcVoiceMediaChannel* voice_channel =
static_cast<WebRtcVoiceMediaChannel*>(voice_channel_);
if (!voice_channel->SetupSharedBandwidthEstimation(
engine()->vie()->engine(), default_channel_id_)) {
return false;
}
}
return true;
}
WebRtcVideoMediaChannel::~WebRtcVideoMediaChannel() {
@ -1581,6 +1593,12 @@ WebRtcVideoMediaChannel::~WebRtcVideoMediaChannel() {
const bool render = false;
SetRender(render);
if (voice_channel_) {
WebRtcVoiceMediaChannel* voice_channel =
static_cast<WebRtcVoiceMediaChannel*>(voice_channel_);
voice_channel->SetupSharedBandwidthEstimation(NULL, -1);
}
while (!send_channels_.empty()) {
if (!DeleteSendChannel(send_channels_.begin()->first)) {
LOG(LS_ERROR) << "Unable to delete channel with ssrc key "

View File

@ -52,6 +52,7 @@
#include "webrtc/base/stringutils.h"
#include "webrtc/common.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/video_engine/include/vie_network.h"
#ifdef WIN32
#include <objbase.h> // NOLINT
@ -1759,6 +1760,8 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine *engine)
typing_noise_detected_(false),
desired_send_(SEND_NOTHING),
send_(SEND_NOTHING),
shared_bwe_vie_(NULL),
shared_bwe_vie_channel_(-1),
default_receive_ssrc_(0) {
engine->RegisterChannel(this);
LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel "
@ -1770,6 +1773,8 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine *engine)
WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel() {
LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel "
<< voe_channel();
ASSERT(shared_bwe_vie_ == NULL);
ASSERT(shared_bwe_vie_channel_ == -1);
// Remove any remaining send streams, the default channel will be deleted
// later.
@ -1870,6 +1875,12 @@ bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) {
}
}
// Force update of Video Engine BWE forwarding to reflect experiment setting.
if (!SetupSharedBandwidthEstimation(shared_bwe_vie_,
shared_bwe_vie_channel_)) {
return false;
}
LOG(LS_INFO) << "Set voice channel options. Current options: "
<< options_.ToString();
return true;
@ -2531,8 +2542,8 @@ bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) {
}
if (engine()->voe()->rtp()->SetRTCP_CNAME(channel, sp.cname.c_str()) == -1) {
LOG_RTCERR2(SetRTCP_CNAME, channel, sp.cname);
return false;
}
// Set the current codecs to be used for the new channel.
@ -2604,6 +2615,9 @@ bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) {
receive_channels_.insert(std::make_pair(
default_receive_ssrc_,
new WebRtcVoiceChannelRenderer(voe_channel(), audio_transport)));
if (!SetupSharedBweOnChannel(voe_channel())) {
return false;
}
return SetPlayout(voe_channel(), playout_);
}
@ -2691,6 +2705,11 @@ bool WebRtcVoiceMediaChannel::ConfigureRecvChannel(int channel) {
return false;
}
// Set up channel to be able to forward incoming packets to video engine BWE.
if (!SetupSharedBweOnChannel(channel)) {
return false;
}
return SetPlayout(channel, playout_);
}
@ -3060,7 +3079,8 @@ void WebRtcVoiceMediaChannel::OnPacketReceived(
engine()->voe()->network()->ReceivedRTPPacket(
which_channel,
packet->data(),
static_cast<unsigned int>(packet->length()));
static_cast<unsigned int>(packet->length()),
webrtc::PacketTime(packet_time.timestamp, packet_time.not_before));
}
void WebRtcVoiceMediaChannel::OnRtcpReceived(
@ -3454,6 +3474,23 @@ int WebRtcVoiceMediaChannel::GetSendChannelNum(uint32 ssrc) {
return -1;
}
bool WebRtcVoiceMediaChannel::SetupSharedBandwidthEstimation(
webrtc::VideoEngine* vie, int vie_channel) {
shared_bwe_vie_ = vie;
shared_bwe_vie_channel_ = vie_channel;
if (!SetupSharedBweOnChannel(voe_channel())) {
return false;
}
for (ChannelMap::iterator it = receive_channels_.begin();
it != receive_channels_.end(); ++it) {
if (!SetupSharedBweOnChannel(it->second->channel())) {
return false;
}
}
return true;
}
bool WebRtcVoiceMediaChannel::GetRedSendCodec(const AudioCodec& red_codec,
const std::vector<AudioCodec>& all_codecs, webrtc::CodecInst* send_codec) {
// Get the RED encodings from the parameter with no name. This may
@ -3603,6 +3640,25 @@ bool WebRtcVoiceMediaChannel::SetHeaderExtension(ExtensionSetterFunction setter,
return true;
}
bool WebRtcVoiceMediaChannel::SetupSharedBweOnChannel(int voe_channel) {
webrtc::ViENetwork* vie_network = NULL;
int vie_channel = -1;
if (options_.combined_audio_video_bwe.GetWithDefaultIfUnset(false) &&
shared_bwe_vie_ != NULL && shared_bwe_vie_channel_ != -1) {
vie_network = webrtc::ViENetwork::GetInterface(shared_bwe_vie_);
vie_channel = shared_bwe_vie_channel_;
}
if (engine()->voe()->rtp()->SetVideoEngineBWETarget(voe_channel, vie_network,
vie_channel) == -1) {
LOG_RTCERR3(SetVideoEngineBWETarget, voe_channel, vie_network, vie_channel);
if (vie_network != NULL) {
// Only fail when we are enabling BWE forwarding; errors while clearing
// it during teardown are ignored.
return false;
}
}
return true;
}
int WebRtcSoundclipStream::Read(void *buf, int len) {
size_t res = 0;
mem_.Read(buf, len, &res, NULL);
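Seen from the outside, SetOptions() re-running the shared-BWE setup means the option can be flipped at any time. A sketch of that flow as the unit tests below exercise it (media_channel and vie are fixture names from those tests):

cricket::AudioOptions options;
options.combined_audio_video_bwe.Set(true);
media_channel->SetOptions(options);   // wires each recv channel to (vie, ch)

options.combined_audio_video_bwe.Set(false);
media_channel->SetOptions(options);   // rewires each recv channel to (NULL, -1)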

View File

@ -54,6 +54,10 @@
#error "Bogus include."
#endif
namespace webrtc {
class VideoEngine;
}
namespace cricket {
// WebRtcSoundclipStream is an adapter object that allows a memory stream to be
@ -377,6 +381,8 @@ class WebRtcVoiceMediaChannel
int GetReceiveChannelNum(uint32 ssrc);
int GetSendChannelNum(uint32 ssrc);
bool SetupSharedBandwidthEstimation(webrtc::VideoEngine* vie,
int vie_channel);
protected:
int GetLastEngineError() { return engine()->GetLastEngineError(); }
int GetOutputLevel(int channel);
@ -419,6 +425,7 @@ class WebRtcVoiceMediaChannel
bool SetHeaderExtension(ExtensionSetterFunction setter, int channel_id,
const RtpHeaderExtension* extension);
bool SetupSharedBweOnChannel(int voe_channel);
bool SetChannelRecvRtpHeaderExtensions(
int channel_id,
@ -442,6 +449,11 @@ class WebRtcVoiceMediaChannel
bool typing_noise_detected_;
SendFlags desired_send_;
SendFlags send_;
// shared_bwe_vie_ and shared_bwe_vie_channel_ together identify the WebRTC
// VideoEngine channel that this voice channel should forward incoming
// packets to for bandwidth estimation purposes.
webrtc::VideoEngine* shared_bwe_vie_;
int shared_bwe_vie_channel_;
// send_channels_ contains the channels which are being used for sending.
// When the default channel (voe_channel) is used for sending, it is

View File

@ -38,6 +38,7 @@
#include "talk/media/base/fakenetworkinterface.h"
#include "talk/media/base/fakertp.h"
#include "talk/media/webrtc/fakewebrtcvoiceengine.h"
#include "talk/media/webrtc/webrtcvie.h"
#include "talk/media/webrtc/webrtcvoiceengine.h"
#include "talk/p2p/base/fakesession.h"
#include "talk/session/media/channel.h"
@ -3160,3 +3161,113 @@ TEST(WebRtcVoiceEngineTest, CoInitialize) {
CoUninitialize();
}
#endif
TEST_F(WebRtcVoiceEngineTestFake, ChangeCombinedAudioVideoBweOption) {
// Test that changing the combined_audio_video_bwe option results in the
// expected state changes in VoiceEngine.
cricket::ViEWrapper vie;
const int kVieCh = 667;
EXPECT_TRUE(SetupEngine());
cricket::WebRtcVoiceMediaChannel* media_channel =
static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
EXPECT_TRUE(media_channel->SetupSharedBandwidthEstimation(vie.engine(),
kVieCh));
EXPECT_TRUE(media_channel->AddRecvStream(
cricket::StreamParams::CreateLegacy(2)));
int recv_ch = voe_.GetLastChannel();
// Combined BWE should not be set up yet.
EXPECT_EQ(NULL, voe_.GetViENetwork(recv_ch));
EXPECT_EQ(-1, voe_.GetVideoChannel(recv_ch));
// Enable combined BWE option - now it should be set up.
cricket::AudioOptions options;
options.combined_audio_video_bwe.Set(true);
EXPECT_TRUE(media_channel->SetOptions(options));
EXPECT_EQ(vie.network(), voe_.GetViENetwork(recv_ch));
EXPECT_EQ(kVieCh, voe_.GetVideoChannel(recv_ch));
// Disable combined BWE option - should be disabled again.
options.combined_audio_video_bwe.Set(false);
EXPECT_TRUE(media_channel->SetOptions(options));
EXPECT_EQ(NULL, voe_.GetViENetwork(recv_ch));
EXPECT_EQ(-1, voe_.GetVideoChannel(recv_ch));
EXPECT_TRUE(media_channel->SetupSharedBandwidthEstimation(NULL, -1));
}
TEST_F(WebRtcVoiceEngineTestFake, SetupSharedBandwidthEstimation) {
// Test that calling SetupSharedBandwidthEstimation() on the voice media
// channel results in the expected state changes in VoiceEngine.
cricket::ViEWrapper vie1;
cricket::ViEWrapper vie2;
const int kVieCh1 = 667;
const int kVieCh2 = 70;
EXPECT_TRUE(SetupEngine());
cricket::WebRtcVoiceMediaChannel* media_channel =
static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
cricket::AudioOptions options;
options.combined_audio_video_bwe.Set(true);
EXPECT_TRUE(media_channel->SetOptions(options));
EXPECT_TRUE(media_channel->AddRecvStream(
cricket::StreamParams::CreateLegacy(2)));
int recv_ch = voe_.GetLastChannel();
// Combined BWE should not be set up yet.
EXPECT_EQ(NULL, voe_.GetViENetwork(recv_ch));
EXPECT_EQ(-1, voe_.GetVideoChannel(recv_ch));
// Register - should be enabled.
EXPECT_TRUE(media_channel->SetupSharedBandwidthEstimation(vie1.engine(),
kVieCh1));
EXPECT_EQ(vie1.network(), voe_.GetViENetwork(recv_ch));
EXPECT_EQ(kVieCh1, voe_.GetVideoChannel(recv_ch));
// Re-register - should still be enabled.
EXPECT_TRUE(media_channel->SetupSharedBandwidthEstimation(vie2.engine(),
kVieCh2));
EXPECT_EQ(vie2.network(), voe_.GetViENetwork(recv_ch));
EXPECT_EQ(kVieCh2, voe_.GetVideoChannel(recv_ch));
// Unregister - should be disabled again.
EXPECT_TRUE(media_channel->SetupSharedBandwidthEstimation(NULL, -1));
EXPECT_EQ(NULL, voe_.GetViENetwork(recv_ch));
EXPECT_EQ(-1, voe_.GetVideoChannel(recv_ch));
}
TEST_F(WebRtcVoiceEngineTestFake, ConfigureCombinedBweForNewRecvStreams) {
// Test that adding receive streams after enabling combined bandwidth
// estimation will correctly configure each channel.
cricket::ViEWrapper vie;
const int kVieCh = 667;
EXPECT_TRUE(SetupEngine());
cricket::WebRtcVoiceMediaChannel* media_channel =
static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
EXPECT_TRUE(media_channel->SetupSharedBandwidthEstimation(vie.engine(),
kVieCh));
cricket::AudioOptions options;
options.combined_audio_video_bwe.Set(true);
EXPECT_TRUE(media_channel->SetOptions(options));
static const uint32 kSsrcs[] = {1, 2, 3, 4};
int voe_channels[ARRAY_SIZE(kSsrcs)] = {0};
for (unsigned int i = 0; i < ARRAY_SIZE(kSsrcs); ++i) {
EXPECT_TRUE(media_channel->AddRecvStream(
cricket::StreamParams::CreateLegacy(kSsrcs[i])));
int recv_ch = media_channel->GetReceiveChannelNum(kSsrcs[i]);
EXPECT_NE(-1, recv_ch);
voe_channels[i] = recv_ch;
EXPECT_EQ(vie.network(), voe_.GetViENetwork(recv_ch));
EXPECT_EQ(kVieCh, voe_.GetVideoChannel(recv_ch));
}
EXPECT_TRUE(media_channel->SetupSharedBandwidthEstimation(NULL, -1));
for (unsigned int i = 0; i < ARRAY_SIZE(voe_channels); ++i) {
EXPECT_EQ(NULL, voe_.GetViENetwork(voe_channels[i]));
EXPECT_EQ(-1, voe_.GetVideoChannel(voe_channels[i]));
}
}