Implement dual stream full stack test and loopback tool
Bug: webrtc:8588
Change-Id: I0abec4891a723c98001f4580f0cfa57a5d6d6bdb
Reviewed-on: https://webrtc-review.googlesource.com/34441
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21416}

commit 255d1cd3b4 (parent c3216e1b1d), committed by Commit Bot
BUILD.gn

@@ -59,6 +59,7 @@ if (!build_with_chromium) {
"system_wrappers:system_wrappers_unittests",
"test",
"video:screenshare_loopback",
"video:sv_loopback",
"video:video_loopback",
"voice_engine:voice_engine_unittests",
]
@@ -192,33 +192,34 @@ void CallTest::DestroyCalls() {
receiver_call_.reset();
}

void CallTest::CreateSendConfig(size_t num_video_streams,
size_t num_audio_streams,
size_t num_flexfec_streams,
Transport* send_transport) {
RTC_DCHECK(num_video_streams <= kNumSsrcs);
void CallTest::CreateVideoSendConfig(VideoSendStream::Config* video_config,
size_t num_video_streams,
size_t num_used_ssrcs,
Transport* send_transport) {
RTC_DCHECK_LE(num_video_streams + num_used_ssrcs, kNumSsrcs);
*video_config = VideoSendStream::Config(send_transport);
video_config->encoder_settings.encoder = &fake_encoder_;
video_config->encoder_settings.payload_name = "FAKE";
video_config->encoder_settings.payload_type = kFakeVideoSendPayloadType;
video_config->rtp.extensions.push_back(
RtpExtension(RtpExtension::kTransportSequenceNumberUri,
kTransportSequenceNumberExtensionId));
video_config->rtp.extensions.push_back(RtpExtension(
RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId));
FillEncoderConfiguration(num_video_streams, &video_encoder_config_);

for (size_t i = 0; i < num_video_streams; ++i)
video_config->rtp.ssrcs.push_back(kVideoSendSsrcs[num_used_ssrcs + i]);
video_config->rtp.extensions.push_back(RtpExtension(
RtpExtension::kVideoRotationUri, kVideoRotationRtpExtensionId));
}

void CallTest::CreateAudioAndFecSendConfigs(size_t num_audio_streams,
size_t num_flexfec_streams,
Transport* send_transport) {
RTC_DCHECK_LE(num_audio_streams, 1);
RTC_DCHECK_LE(num_flexfec_streams, 1);
RTC_DCHECK(num_audio_streams == 0 || voe_send_.channel_id >= 0);
if (num_video_streams > 0) {
video_send_config_ = VideoSendStream::Config(send_transport);
video_send_config_.encoder_settings.encoder = &fake_encoder_;
video_send_config_.encoder_settings.payload_name = "FAKE";
video_send_config_.encoder_settings.payload_type =
kFakeVideoSendPayloadType;
video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTransportSequenceNumberUri,
kTransportSequenceNumberExtensionId));
video_send_config_.rtp.extensions.push_back(RtpExtension(
RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId));
FillEncoderConfiguration(num_video_streams, &video_encoder_config_);

for (size_t i = 0; i < num_video_streams; ++i)
video_send_config_.rtp.ssrcs.push_back(kVideoSendSsrcs[i]);
video_send_config_.rtp.extensions.push_back(RtpExtension(
RtpExtension::kVideoRotationUri, kVideoRotationRtpExtensionId));
}

if (num_audio_streams > 0) {
audio_send_config_ = AudioSendStream::Config(send_transport);
audio_send_config_.voe_channel_id = voe_send_.channel_id;

@@ -237,32 +238,47 @@ void CallTest::CreateSendConfig(size_t num_video_streams,
}
}

void CallTest::CreateMatchingReceiveConfigs(Transport* rtcp_send_transport) {
video_receive_configs_.clear();
allocated_decoders_.clear();
if (num_video_streams_ > 0) {
RTC_DCHECK(!video_send_config_.rtp.ssrcs.empty());
VideoReceiveStream::Config video_config(rtcp_send_transport);
video_config.rtp.remb = false;
video_config.rtp.transport_cc = true;
video_config.rtp.local_ssrc = kReceiverLocalVideoSsrc;
for (const RtpExtension& extension : video_send_config_.rtp.extensions)
video_config.rtp.extensions.push_back(extension);
video_config.renderer = &fake_renderer_;
for (size_t i = 0; i < video_send_config_.rtp.ssrcs.size(); ++i) {
VideoReceiveStream::Decoder decoder =
test::CreateMatchingDecoder(video_send_config_.encoder_settings);
allocated_decoders_.push_back(
std::unique_ptr<VideoDecoder>(decoder.decoder));
video_config.decoders.clear();
video_config.decoders.push_back(decoder);
video_config.rtp.remote_ssrc = video_send_config_.rtp.ssrcs[i];
video_receive_configs_.push_back(video_config.Copy());
}
video_receive_configs_[0].rtp.protected_by_flexfec =
(num_flexfec_streams_ == 1);
void CallTest::CreateSendConfig(size_t num_video_streams,
size_t num_audio_streams,
size_t num_flexfec_streams,
Transport* send_transport) {
if (num_video_streams > 0) {
CreateVideoSendConfig(&video_send_config_, num_video_streams, 0,
send_transport);
}
CreateAudioAndFecSendConfigs(num_audio_streams, num_flexfec_streams,
send_transport);
}

std::vector<VideoReceiveStream::Config>
CallTest::CreateMatchingVideoReceiveConfigs(
const VideoSendStream::Config& video_send_config,
Transport* rtcp_send_transport) {
std::vector<VideoReceiveStream::Config> result;
RTC_DCHECK(!video_send_config.rtp.ssrcs.empty());
VideoReceiveStream::Config video_config(rtcp_send_transport);
video_config.rtp.remb = false;
video_config.rtp.transport_cc = true;
video_config.rtp.local_ssrc = kReceiverLocalVideoSsrc;
for (const RtpExtension& extension : video_send_config.rtp.extensions)
video_config.rtp.extensions.push_back(extension);
video_config.renderer = &fake_renderer_;
for (size_t i = 0; i < video_send_config.rtp.ssrcs.size(); ++i) {
VideoReceiveStream::Decoder decoder =
test::CreateMatchingDecoder(video_send_config.encoder_settings);
allocated_decoders_.push_back(
std::unique_ptr<VideoDecoder>(decoder.decoder));
video_config.decoders.clear();
video_config.decoders.push_back(decoder);
video_config.rtp.remote_ssrc = video_send_config.rtp.ssrcs[i];
result.push_back(video_config.Copy());
}
result[0].rtp.protected_by_flexfec = (num_flexfec_streams_ == 1);
return result;
}

void CallTest::CreateMatchingAudioAndFecConfigs(
Transport* rtcp_send_transport) {
RTC_DCHECK_GE(1, num_audio_streams_);
if (num_audio_streams_ == 1) {
RTC_DCHECK_LE(0, voe_send_.channel_id);

@@ -290,6 +306,20 @@ void CallTest::CreateMatchingReceiveConfigs(Transport* rtcp_send_transport) {
}
}

void CallTest::CreateMatchingReceiveConfigs(Transport* rtcp_send_transport) {
video_receive_configs_.clear();
allocated_decoders_.clear();
if (num_video_streams_ > 0) {
std::vector<VideoReceiveStream::Config> new_configs =
CreateMatchingVideoReceiveConfigs(video_send_config_,
rtcp_send_transport);
for (VideoReceiveStream::Config& config : new_configs) {
video_receive_configs_.push_back(config.Copy());
}
}
CreateMatchingAudioAndFecConfigs(rtcp_send_transport);
}

void CallTest::CreateFrameGeneratorCapturerWithDrift(Clock* clock,
float speed,
int framerate,

@@ -324,8 +354,6 @@ void CallTest::CreateFakeAudioDevices(
void CallTest::CreateVideoStreams() {
RTC_DCHECK(video_send_stream_ == nullptr);
RTC_DCHECK(video_receive_streams_.empty());
RTC_DCHECK(audio_send_stream_ == nullptr);
RTC_DCHECK(audio_receive_streams_.empty());

video_send_stream_ = sender_call_->CreateVideoSendStream(
video_send_config_.Copy(), video_encoder_config_.Copy());

@@ -338,6 +366,8 @@ void CallTest::CreateVideoStreams() {
}

void CallTest::CreateAudioStreams() {
RTC_DCHECK(audio_send_stream_ == nullptr);
RTC_DCHECK(audio_receive_streams_.empty());
audio_send_stream_ = sender_call_->CreateAudioSendStream(audio_send_config_);
for (size_t i = 0; i < audio_receive_configs_.size(); ++i) {
audio_receive_streams_.push_back(

@@ -464,6 +494,7 @@ void CallTest::DestroyVoiceEngines() {
voe_recv_.voice_engine = nullptr;
}

constexpr size_t CallTest::kNumSsrcs;
const int CallTest::kDefaultWidth;
const int CallTest::kDefaultHeight;
const int CallTest::kDefaultFramerate;

@@ -480,10 +511,10 @@ const uint8_t CallTest::kAudioSendPayloadType = 103;
const uint8_t CallTest::kPayloadTypeH264 = 122;
const uint8_t CallTest::kPayloadTypeVP8 = 123;
const uint8_t CallTest::kPayloadTypeVP9 = 124;
const uint32_t CallTest::kSendRtxSsrcs[kNumSsrcs] = {0xBADCAFD, 0xBADCAFE,
0xBADCAFF};
const uint32_t CallTest::kVideoSendSsrcs[kNumSsrcs] = {0xC0FFED, 0xC0FFEE,
0xC0FFEF};
const uint32_t CallTest::kSendRtxSsrcs[kNumSsrcs] = {
0xBADCAFD, 0xBADCAFE, 0xBADCAFF, 0xBADCB00, 0xBADCB01, 0xBADCB02};
const uint32_t CallTest::kVideoSendSsrcs[kNumSsrcs] = {
0xC0FFED, 0xC0FFEE, 0xC0FFEF, 0xC0FFF0, 0xC0FFF1, 0xC0FFF2};
const uint32_t CallTest::kAudioSendSsrc = 0xDEADBEEF;
const uint32_t CallTest::kFlexfecSendSsrc = 0xBADBEEF;
const uint32_t CallTest::kReceiverLocalVideoSsrc = 0x123456;
@@ -38,7 +38,8 @@ class CallTest : public ::testing::Test {
CallTest();
virtual ~CallTest();

static const size_t kNumSsrcs = 3;
static constexpr size_t kNumSsrcs = 6;
static const int kNumSimulcastStreams = 3;
static const int kDefaultWidth = 320;
static const int kDefaultHeight = 180;
static const int kDefaultFramerate = 30;

@@ -77,11 +78,22 @@ class CallTest : public ::testing::Test {
void CreateReceiverCall(const Call::Config& config);
void DestroyCalls();

void CreateVideoSendConfig(VideoSendStream::Config* video_config,
size_t num_video_streams,
size_t num_used_ssrcs,
Transport* send_transport);
void CreateAudioAndFecSendConfigs(size_t num_audio_streams,
size_t num_flexfec_streams,
Transport* send_transport);
void CreateSendConfig(size_t num_video_streams,
size_t num_audio_streams,
size_t num_flexfec_streams,
Transport* send_transport);

std::vector<VideoReceiveStream::Config> CreateMatchingVideoReceiveConfigs(
const VideoSendStream::Config& video_send_config,
Transport* rtcp_send_transport);
void CreateMatchingAudioAndFecConfigs(Transport* rtcp_send_transport);
void CreateMatchingReceiveConfigs(Transport* rtcp_send_transport);

void CreateFrameGeneratorCapturerWithDrift(Clock* drift_clock,
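CreateSendConfig() is now a thin wrapper over per-stream helpers, so a second video send config can be built with its own SSRC offset. A minimal sketch of how a dual-stream test might compose them; the fixture name and the second config member are assumptions for illustration, only the helper signatures come from the declarations above:

// Hypothetical fixture derived from CallTest; second_video_send_config_ is an
// assumed member, the helpers are the ones declared in call_test.h above.
void DualStreamTest::CreateDualStreamSendConfigs(Transport* send_transport) {
  // Stream 0 takes kVideoSendSsrcs[0] (num_used_ssrcs = 0).
  CreateVideoSendConfig(&video_send_config_, /*num_video_streams=*/1,
                        /*num_used_ssrcs=*/0, send_transport);
  // Stream 1 is offset past the SSRC already taken by stream 0; the
  // RTC_DCHECK_LE in CreateVideoSendConfig() guards against exceeding the
  // kNumSsrcs (now 6) preallocated SSRCs.
  CreateVideoSendConfig(&second_video_send_config_, /*num_video_streams=*/1,
                        /*num_used_ssrcs=*/1, send_transport);
  // Audio and FlexFEC are still configured through a single call.
  CreateAudioAndFecSendConfigs(/*num_audio_streams=*/0,
                               /*num_flexfec_streams=*/0, send_transport);
}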
@@ -20,6 +20,45 @@
namespace webrtc {
namespace test {

LayerFilteringTransport::LayerFilteringTransport(
SingleThreadedTaskQueueForTesting* task_queue,
const FakeNetworkPipe::Config& config,
Call* send_call,
uint8_t vp8_video_payload_type,
uint8_t vp9_video_payload_type,
int selected_tl,
int selected_sl,
const std::map<uint8_t, MediaType>& payload_type_map,
uint32_t ssrc_to_filter_min,
uint32_t ssrc_to_filter_max)
: DirectTransport(task_queue, config, send_call, payload_type_map),
vp8_video_payload_type_(vp8_video_payload_type),
vp9_video_payload_type_(vp9_video_payload_type),
selected_tl_(selected_tl),
selected_sl_(selected_sl),
discarded_last_packet_(false),
ssrc_to_filter_min_(ssrc_to_filter_min),
ssrc_to_filter_max_(ssrc_to_filter_max) {}

LayerFilteringTransport::LayerFilteringTransport(
SingleThreadedTaskQueueForTesting* task_queue,
std::unique_ptr<FakeNetworkPipe> pipe,
Call* send_call,
uint8_t vp8_video_payload_type,
uint8_t vp9_video_payload_type,
int selected_tl,
int selected_sl,
uint32_t ssrc_to_filter_min,
uint32_t ssrc_to_filter_max)
: DirectTransport(task_queue, std::move(pipe), send_call),
vp8_video_payload_type_(vp8_video_payload_type),
vp9_video_payload_type_(vp9_video_payload_type),
selected_tl_(selected_tl),
selected_sl_(selected_sl),
discarded_last_packet_(false),
ssrc_to_filter_min_(ssrc_to_filter_min),
ssrc_to_filter_max_(ssrc_to_filter_max) {}

LayerFilteringTransport::LayerFilteringTransport(
SingleThreadedTaskQueueForTesting* task_queue,
const FakeNetworkPipe::Config& config,

@@ -34,7 +73,9 @@ LayerFilteringTransport::LayerFilteringTransport(
vp9_video_payload_type_(vp9_video_payload_type),
selected_tl_(selected_tl),
selected_sl_(selected_sl),
discarded_last_packet_(false) {}
discarded_last_packet_(false),
ssrc_to_filter_min_(0),
ssrc_to_filter_max_(0xFFFFFFFF) {}

LayerFilteringTransport::LayerFilteringTransport(
SingleThreadedTaskQueueForTesting* task_queue,

@@ -49,7 +90,9 @@ LayerFilteringTransport::LayerFilteringTransport(
vp9_video_payload_type_(vp9_video_payload_type),
selected_tl_(selected_tl),
selected_sl_(selected_sl),
discarded_last_packet_(false) {}
discarded_last_packet_(false),
ssrc_to_filter_min_(0),
ssrc_to_filter_max_(0xFFFFFFFF) {}

bool LayerFilteringTransport::DiscardedLastPacket() const {
return discarded_last_packet_;

@@ -68,6 +111,11 @@ bool LayerFilteringTransport::SendRtp(const uint8_t* packet,
RTPHeader header;
parser.Parse(&header);

if (header.ssrc < ssrc_to_filter_min_ || header.ssrc > ssrc_to_filter_max_) {
// Nothing to change, forward the packet immediately.
return test::DirectTransport::SendRtp(packet, length, options);
}

RTC_DCHECK_LE(length, IP_PACKET_SIZE);
uint8_t temp_buffer[IP_PACKET_SIZE];
memcpy(temp_buffer, packet, length);
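With the new SSRC bounds, layer filtering can be confined to one of the two streams: packets whose SSRC falls outside the range take the early return in SendRtp() above and pass through untouched. A hedged sketch of constructing such a transport; the task queue, pipe config, call pointer, payload-type map and the kScreenshare* SSRC constants are assumed to exist in the calling test, only the constructor parameters come from this change:

// Sketch: filter out temporal layers above 1 only for packets whose SSRC is in
// [kScreenshareFirstSsrc, kScreenshareLastSsrc]; the camera stream's SSRCs fall
// outside the range and are forwarded unchanged.
test::LayerFilteringTransport send_transport(
    &task_queue, pipe_config, sender_call,
    test::CallTest::kPayloadTypeVP8, test::CallTest::kPayloadTypeVP9,
    /*selected_tl=*/1, /*selected_sl=*/-1, payload_type_map,
    /*ssrc_to_filter_min=*/kScreenshareFirstSsrc,
    /*ssrc_to_filter_max=*/kScreenshareLastSsrc);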
@@ -23,6 +23,16 @@ namespace test {

class LayerFilteringTransport : public test::DirectTransport {
public:
LayerFilteringTransport(SingleThreadedTaskQueueForTesting* task_queue,
const FakeNetworkPipe::Config& config,
Call* send_call,
uint8_t vp8_video_payload_type,
uint8_t vp9_video_payload_type,
int selected_tl,
int selected_sl,
const std::map<uint8_t, MediaType>& payload_type_map,
uint32_t ssrc_to_filter_min,
uint32_t ssrc_to_filter_max);
LayerFilteringTransport(SingleThreadedTaskQueueForTesting* task_queue,
const FakeNetworkPipe::Config& config,
Call* send_call,

@@ -31,6 +41,15 @@ class LayerFilteringTransport : public test::DirectTransport {
int selected_tl,
int selected_sl,
const std::map<uint8_t, MediaType>& payload_type_map);
LayerFilteringTransport(SingleThreadedTaskQueueForTesting* task_queue,
std::unique_ptr<FakeNetworkPipe> pipe,
Call* send_call,
uint8_t vp8_video_payload_type,
uint8_t vp9_video_payload_type,
int selected_tl,
int selected_sl,
uint32_t ssrc_to_filter_min,
uint32_t ssrc_to_filter_max);
LayerFilteringTransport(SingleThreadedTaskQueueForTesting* task_queue,
std::unique_ptr<FakeNetworkPipe> pipe,
Call* send_call,

@@ -52,6 +71,8 @@ class LayerFilteringTransport : public test::DirectTransport {
const int selected_tl_;
const int selected_sl_;
bool discarded_last_packet_;
const uint32_t ssrc_to_filter_min_;
const uint32_t ssrc_to_filter_max_;
};

}  // namespace test
@@ -211,6 +211,30 @@ if (rtc_include_tests) {
}
}

rtc_executable("sv_loopback") {
testonly = true
sources = [
"sv_loopback.cc",
]
deps = [
":video_quality_test",
"../rtc_base:rtc_base_approved",
"../system_wrappers:metrics_default",
"../test:field_trial",
"../test:run_test",
"../test:run_test_interface",
"../test:test_common",
"../test:test_renderer",
"../test:test_support",
"//testing/gmock",
"//testing/gtest",
]
if (!build_with_chromium && is_clang) {
# Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
}
}

rtc_executable("video_replay") {
testonly = true
sources = [
@@ -3413,7 +3413,8 @@ TEST_P(EndToEndTest, GetStats) {
RTC_DCHECK(send_stream_);
VideoSendStream::Stats stats = send_stream_->GetStats();

size_t expected_num_streams = kNumSsrcs + expected_send_ssrcs_.size();
size_t expected_num_streams =
kNumSimulcastStreams + expected_send_ssrcs_.size();
send_stats_filled_["NumStreams"] |=
stats.substreams.size() == expected_num_streams;

@@ -3563,7 +3564,7 @@ TEST_P(EndToEndTest, GetStats) {
kFakeVideoSendPayloadType;
}

for (size_t i = 0; i < kNumSsrcs; ++i)
for (size_t i = 0; i < kNumSimulcastStreams; ++i)
send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);

// Use a delayed encoder to make sure we see CpuOveruseMetrics stats that

@@ -3571,7 +3572,7 @@ TEST_P(EndToEndTest, GetStats) {
send_config->encoder_settings.encoder = &encoder_;
}

size_t GetNumVideoStreams() const override { return kNumSsrcs; }
size_t GetNumVideoStreams() const override { return kNumSimulcastStreams; }

void OnVideoStreamsCreated(
VideoSendStream* send_stream,

@@ -3910,21 +3911,22 @@ TEST_P(EndToEndTest, SendsSetSsrc) {
}

TEST_P(EndToEndTest, SendsSetSimulcastSsrcs) {
TestSendsSetSsrcs(kNumSsrcs, false);
TestSendsSetSsrcs(kNumSimulcastStreams, false);
}

TEST_P(EndToEndTest, CanSwitchToUseAllSsrcs) {
TestSendsSetSsrcs(kNumSsrcs, true);
TestSendsSetSsrcs(kNumSimulcastStreams, true);
}

TEST_P(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
class ObserveRedundantPayloads: public test::EndToEndTest {
public:
ObserveRedundantPayloads()
: EndToEndTest(kDefaultTimeoutMs), ssrcs_to_observe_(kNumSsrcs) {
for (size_t i = 0; i < kNumSsrcs; ++i) {
registered_rtx_ssrc_[kSendRtxSsrcs[i]] = true;
}
: EndToEndTest(kDefaultTimeoutMs),
ssrcs_to_observe_(kNumSimulcastStreams) {
for (size_t i = 0; i < kNumSimulcastStreams; ++i) {
registered_rtx_ssrc_[kSendRtxSsrcs[i]] = true;
}
}

private:

@@ -3951,7 +3953,7 @@ TEST_P(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
return SEND_PACKET;
}

size_t GetNumVideoStreams() const override { return kNumSsrcs; }
size_t GetNumVideoStreams() const override { return kNumSimulcastStreams; }

// This test use other VideoStream settings than the the default settings
// implemented in DefaultVideoStreamFactory. Therefore this test implement

@@ -3988,7 +3990,7 @@ TEST_P(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
new rtc::RefCountedObject<VideoStreamFactory>();
send_config->rtp.rtx.payload_type = kSendRtxPayloadType;

for (size_t i = 0; i < kNumSsrcs; ++i)
for (size_t i = 0; i < kNumSimulcastStreams; ++i)
send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);

// Significantly higher than max bitrates for all video streams -> forcing

@@ -4053,8 +4055,8 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx,
public:
explicit RtpSequenceObserver(bool use_rtx)
: test::RtpRtcpObserver(kDefaultTimeoutMs),
ssrcs_to_observe_(kNumSsrcs) {
for (size_t i = 0; i < kNumSsrcs; ++i) {
ssrcs_to_observe_(kNumSimulcastStreams) {
for (size_t i = 0; i < kNumSimulcastStreams; ++i) {
ssrc_is_rtx_[kVideoSendSsrcs[i]] = false;
if (use_rtx)
ssrc_is_rtx_[kSendRtxSsrcs[i]] = true;

@@ -4184,10 +4186,10 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx,
send_transport->SetReceiver(receiver_call_->Receiver());
receive_transport->SetReceiver(sender_call_->Receiver());

CreateSendConfig(kNumSsrcs, 0, 0, send_transport.get());
CreateSendConfig(kNumSimulcastStreams, 0, 0, send_transport.get());

if (use_rtx) {
for (size_t i = 0; i < kNumSsrcs; ++i) {
for (size_t i = 0; i < kNumSimulcastStreams; ++i) {
video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
}
video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;

@@ -4242,7 +4244,7 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx,
task_queue_.SendTask([this]() {
video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_.Copy());
});
observer.ResetExpectedSsrcs(kNumSsrcs);
observer.ResetExpectedSsrcs(kNumSimulcastStreams);
EXPECT_TRUE(observer.Wait())
<< "Timed out waiting for all SSRCs to send packets.";

@@ -4257,7 +4259,7 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx,
task_queue_.SendTask([this]() {
video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_.Copy());
});
observer.ResetExpectedSsrcs(kNumSsrcs);
observer.ResetExpectedSsrcs(kNumSimulcastStreams);
EXPECT_TRUE(observer.Wait())
<< "Timed out waiting for all SSRCs to send packets.";
}
@@ -32,6 +32,10 @@ class FullStackTest : public VideoQualityTest {
const std::string kAlrProbingExperiment =
std::string(AlrDetector::kScreenshareProbingBweExperimentName) +
"/1.1,2875,85,20,-20,0/";
const std::string kRoundRobinPacingQueueExperiment =
"WebRTC-RoundRobinPacing/Enabled/";
const std::string kPacerPushBackExperiment =
"WebRTC-PacerPushbackExperiment/Enabled/";
};

// VideoQualityTest::Params params = {

@@ -49,8 +53,9 @@ TEST_F(FullStackTest, ForemanCifWithoutPacketLossVp9) {
// TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif.
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 700000, 700000, 700000, false,
"VP9", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 700000,
700000, 700000, false, "VP9", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_VP9", 0.0, 0.0,
kFullStackTestDurationSecs};
RunTest(foreman_cif);
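The full-stack tests now assign their parameters to per-stream slots (video[0], screenshare[0], ss[0]), which is what lets the new sv_loopback tool drive a camera stream and a screenshare stream in the same call. A hedged sketch of what a dual-stream parameter set might look like; the size-two arrays and all concrete values are assumptions for illustration, only the field layout is taken from the initializers in these tests:

// Hedged sketch: assumes Params::video/screenshare are per-stream arrays of
// size two, as the [0] indexing in the tests suggests; values are illustrative.
VideoQualityTest::Params dual;
dual.call.send_side_bwe = true;
// Stream 0: camera video (field order copied from the foreman_cif initializers).
dual.video[0] = {true, 352, 288, 30, 30000,
                 500000, 2000000, false, "VP8", 1,
                 0, 0, false, false, "foreman_cif"};
// Stream 1: screenshare content with its own codec settings.
dual.video[1] = {true, 1850, 1110, 5, 50000,
                 200000, 2000000, false, "VP8", 2,
                 1, 400000, false, false, ""};
dual.screenshare[1] = {true, false, 10};
dual.analyzer = {"dual_stream_example", 0.0, 0.0, kFullStackTestDurationSecs};
RunTest(dual);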
@@ -59,8 +64,9 @@ TEST_F(FullStackTest, ForemanCifWithoutPacketLossVp9) {
TEST_F(FullStackTest, ForemanCifPlr5Vp9) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP9", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP9", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_VP9", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.loss_percent = 5;

@@ -72,8 +78,9 @@ TEST_F(FullStackTest, ForemanCifPlr5Vp9) {
TEST_F(FullStackTest, ParisQcifWithoutPacketLoss) {
VideoQualityTest::Params paris_qcif;
paris_qcif.call.send_side_bwe = true;
paris_qcif.video = {true, 176, 144, 30, 300000, 300000, 300000, false,
"VP8", 1, 0, 0, false, false, "paris_qcif"};
paris_qcif.video[0] = {true, 176, 144, 30, 300000,
300000, 300000, false, "VP8", 1,
0, 0, false, false, "paris_qcif"};
paris_qcif.analyzer = {"net_delay_0_0_plr_0", 36.0, 0.96,
kFullStackTestDurationSecs};
RunTest(paris_qcif);

@@ -83,8 +90,9 @@ TEST_F(FullStackTest, ForemanCifWithoutPacketLoss) {
// TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif.
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 700000, 700000, 700000, false,
"VP8", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 700000,
700000, 700000, false, "VP8", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0", 0.0, 0.0,
kFullStackTestDurationSecs};
RunTest(foreman_cif);

@@ -93,8 +101,9 @@ TEST_F(FullStackTest, ForemanCifWithoutPacketLoss) {
TEST_F(FullStackTest, ForemanCif30kbpsWithoutPacketLoss) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 10, 30000, 30000, 30000, false,
"VP8", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 10, 30000,
30000, 30000, false, "VP8", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_30kbps_net_delay_0_0_plr_0", 0.0, 0.0,
kFullStackTestDurationSecs};
RunTest(foreman_cif);

@@ -103,8 +112,9 @@ TEST_F(FullStackTest, ForemanCif30kbpsWithoutPacketLoss) {
TEST_F(FullStackTest, ForemanCifPlr5) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP8", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP8", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.loss_percent = 5;

@@ -115,8 +125,9 @@ TEST_F(FullStackTest, ForemanCifPlr5) {
TEST_F(FullStackTest, ForemanCifPlr5Ulpfec) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP8", 1, 0, 0, true, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP8", 1,
0, 0, true, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_ulpfec", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.loss_percent = 5;

@@ -127,8 +138,9 @@ TEST_F(FullStackTest, ForemanCifPlr5Ulpfec) {
TEST_F(FullStackTest, ForemanCifPlr5Flexfec) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP8", 1, 0, 0, false, true, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP8", 1,
0, 0, false, true, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_flexfec", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.loss_percent = 5;

@@ -139,8 +151,9 @@ TEST_F(FullStackTest, ForemanCifPlr5Flexfec) {
TEST_F(FullStackTest, ForemanCif500kbpsPlr3Flexfec) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP8", 1, 0, 0, false, true, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP8", 1,
0, 0, false, true, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_500kbps_delay_50_0_plr_3_flexfec", 0.0,
0.0, kFullStackTestDurationSecs};
foreman_cif.pipe.loss_percent = 3;

@@ -152,8 +165,9 @@ TEST_F(FullStackTest, ForemanCif500kbpsPlr3Flexfec) {
TEST_F(FullStackTest, ForemanCif500kbpsPlr3Ulpfec) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP8", 1, 0, 0, true, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP8", 1,
0, 0, true, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_500kbps_delay_50_0_plr_3_ulpfec", 0.0,
0.0, kFullStackTestDurationSecs};
foreman_cif.pipe.loss_percent = 3;

@@ -167,9 +181,9 @@ TEST_F(FullStackTest, ForemanCifWithoutPacketlossH264) {
// TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif.
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 700000,
700000, 700000, false, "H264", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 700000,
700000, 700000, false, "H264", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_H264", 0.0, 0.0,
kFullStackTestDurationSecs};
RunTest(foreman_cif);

@@ -178,8 +192,9 @@ TEST_F(FullStackTest, ForemanCifWithoutPacketlossH264) {
TEST_F(FullStackTest, ForemanCif30kbpsWithoutPacketlossH264) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 10, 30000, 30000, 30000, false,
"H264", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 10, 30000,
30000, 30000, false, "H264", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_30kbps_net_delay_0_0_plr_0_H264", 0.0,
0.0, kFullStackTestDurationSecs};
RunTest(foreman_cif);

@@ -188,8 +203,9 @@ TEST_F(FullStackTest, ForemanCif30kbpsWithoutPacketlossH264) {
TEST_F(FullStackTest, ForemanCifPlr5H264) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"H264", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "H264", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_H264", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.loss_percent = 5;

@@ -203,8 +219,9 @@ TEST_F(FullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) {

VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"H264", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "H264", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_H264_sps_pps_idr", 0.0,
0.0, kFullStackTestDurationSecs};
foreman_cif.pipe.loss_percent = 5;

@@ -216,8 +233,9 @@ TEST_F(FullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) {
TEST_F(FullStackTest, ForemanCifPlr5H264Flexfec) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"H264", 1, 0, 0, false, true, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "H264", 1,
0, 0, false, true, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_H264_flexfec", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.loss_percent = 5;

@@ -230,8 +248,9 @@ TEST_F(FullStackTest, ForemanCifPlr5H264Flexfec) {
TEST_F(FullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"H264", 1, 0, 0, true, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "H264", 1,
0, 0, true, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_H264_ulpfec", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.loss_percent = 5;

@@ -243,8 +262,9 @@ TEST_F(FullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) {
TEST_F(FullStackTest, ForemanCif500kbps) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP8", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP8", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_500kbps", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.queue_length_packets = 0;

@@ -256,8 +276,9 @@ TEST_F(FullStackTest, ForemanCif500kbps) {
TEST_F(FullStackTest, ForemanCif500kbpsLimitedQueue) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP8", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP8", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_500kbps_32pkts_queue", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.queue_length_packets = 32;

@@ -269,8 +290,9 @@ TEST_F(FullStackTest, ForemanCif500kbpsLimitedQueue) {
TEST_F(FullStackTest, ForemanCif500kbps100ms) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP8", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP8", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_500kbps_100ms", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.queue_length_packets = 0;

@@ -282,8 +304,9 @@ TEST_F(FullStackTest, ForemanCif500kbps100ms) {
TEST_F(FullStackTest, ForemanCif500kbps100msLimitedQueue) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP8", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP8", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_500kbps_100ms_32pkts_queue", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.queue_length_packets = 32;
@@ -295,8 +318,9 @@ TEST_F(FullStackTest, ForemanCif500kbps100msLimitedQueue) {
TEST_F(FullStackTest, ForemanCif500kbps100msLimitedQueueRecvBwe) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = false;
foreman_cif.video = {true, 352, 288, 30, 30000, 500000, 2000000, false,
"VP8", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
500000, 2000000, false, "VP8", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_500kbps_100ms_32pkts_queue_recv_bwe",
0.0, 0.0, kFullStackTestDurationSecs};
foreman_cif.pipe.queue_length_packets = 32;

@@ -308,8 +332,9 @@ TEST_F(FullStackTest, ForemanCif500kbps100msLimitedQueueRecvBwe) {
TEST_F(FullStackTest, ForemanCif1000kbps100msLimitedQueue) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video = {true, 352, 288, 30, 30000, 2000000, 2000000, false,
"VP8", 1, 0, 0, false, false, "foreman_cif"};
foreman_cif.video[0] = {true, 352, 288, 30, 30000,
2000000, 2000000, false, "VP8", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_1000kbps_100ms_32pkts_queue", 0.0, 0.0,
kFullStackTestDurationSecs};
foreman_cif.pipe.queue_length_packets = 32;

@@ -322,7 +347,7 @@ TEST_F(FullStackTest, ForemanCif1000kbps100msLimitedQueue) {
TEST_F(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) {
VideoQualityTest::Params conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
conf_motion_hd.video = {
conf_motion_hd.video[0] = {
true, 1280, 720, 50, 30000,
3000000, 3000000, false, "VP8", 1,
0, 0, false, false, "ConferenceMotion_1280_720_50"};

@@ -337,7 +362,7 @@ TEST_F(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) {
TEST_F(FullStackTest, ConferenceMotionHd1TLModerateLimits) {
VideoQualityTest::Params conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
conf_motion_hd.video = {
conf_motion_hd.video[0] = {
true, 1280, 720, 50, 30000,
3000000, 3000000, false, "VP8", 1,
-1, 0, false, false, "ConferenceMotion_1280_720_50"};

@@ -353,7 +378,7 @@ TEST_F(FullStackTest, ConferenceMotionHd1TLModerateLimits) {
TEST_F(FullStackTest, ConferenceMotionHd2TLModerateLimits) {
VideoQualityTest::Params conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
conf_motion_hd.video = {
conf_motion_hd.video[0] = {
true, 1280, 720, 50, 30000,
3000000, 3000000, false, "VP8", 2,
-1, 0, false, false, "ConferenceMotion_1280_720_50"};

@@ -369,7 +394,7 @@ TEST_F(FullStackTest, ConferenceMotionHd2TLModerateLimits) {
TEST_F(FullStackTest, ConferenceMotionHd3TLModerateLimits) {
VideoQualityTest::Params conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
conf_motion_hd.video = {
conf_motion_hd.video[0] = {
true, 1280, 720, 50, 30000,
3000000, 3000000, false, "VP8", 3,
-1, 0, false, false, "ConferenceMotion_1280_720_50"};

@@ -385,7 +410,7 @@ TEST_F(FullStackTest, ConferenceMotionHd3TLModerateLimits) {
TEST_F(FullStackTest, ConferenceMotionHd4TLModerateLimits) {
VideoQualityTest::Params conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
conf_motion_hd.video = {
conf_motion_hd.video[0] = {
true, 1280, 720, 50, 30000,
3000000, 3000000, false, "VP8", 4,
-1, 0, false, false, "ConferenceMotion_1280_720_50"};

@@ -402,7 +427,7 @@ TEST_F(FullStackTest, ConferenceMotionHd3TLModerateLimitsAltTLPattern) {
test::ScopedFieldTrials field_trial("WebRTC-UseShortVP8TL3Pattern/Enabled/");
VideoQualityTest::Params conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
conf_motion_hd.video = {
conf_motion_hd.video[0] = {
true, 1280, 720, 50, 30000,
3000000, 3000000, false, "VP8", 3,
-1, 0, false, false, "ConferenceMotion_1280_720_50"};

@@ -419,7 +444,7 @@ TEST_F(FullStackTest, ConferenceMotionHd3TLModerateLimitsAltTLPattern) {
TEST_F(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) {
VideoQualityTest::Params conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
conf_motion_hd.video = {
conf_motion_hd.video[0] = {
true, 1280, 720, 50, 30000,
3000000, 3000000, false, "VP9", 1,
0, 0, false, false, "ConferenceMotion_1280_720_50"};

@@ -436,9 +461,10 @@ TEST_F(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) {
TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL) {
VideoQualityTest::Params screenshare;
screenshare.call.send_side_bwe = true;
screenshare.video = {true, 1850, 1110, 5, 50000, 200000, 2000000, false,
"VP8", 2, 1, 400000, false, false, ""};
screenshare.screenshare = {true, false, 10};
screenshare.video[0] = {true, 1850, 1110, 5, 50000,
200000, 2000000, false, "VP8", 2,
1, 400000, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
screenshare.analyzer = {"screenshare_slides", 0.0, 0.0,
kFullStackTestDurationSecs};
RunTest(screenshare);

@@ -448,34 +474,35 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_3TL_Simulcast) {
test::ScopedFieldTrials field_trial(kScreenshareSimulcastExperiment);
VideoQualityTest::Params screenshare;
screenshare.call.send_side_bwe = true;
screenshare.screenshare = {true, false, 10};
screenshare.video = {true, 1850, 1110, 5, 800000,
2500000, 2500000, false, "VP8", 3,
2, 400000, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
screenshare.video[0] = {true, 1850, 1110, 5, 800000,
2500000, 2500000, false, "VP8", 3,
2, 400000, false, false, ""};
screenshare.analyzer = {"screenshare_slides_simulcast", 0.0, 0.0,
kFullStackTestDurationSecs};
VideoQualityTest::Params screenshare_params_high;
screenshare_params_high.video = {true, 1850, 1110, 5, 800000,
2500000, 2500000, false, "VP8", 3,
0, 400000, false, false, ""};
screenshare_params_high.video[0] = {true, 1850, 1110, 5, 800000,
2500000, 2500000, false, "VP8", 3,
0, 400000, false, false, ""};
VideoQualityTest::Params screenshare_params_low;
screenshare_params_low.video = {true, 1850, 1110, 5, 50000,
200000, 2000000, false, "VP8", 2,
0, 400000, false, false, ""};
screenshare_params_low.video[0] = {true, 1850, 1110, 5, 50000,
200000, 2000000, false, "VP8", 2,
0, 400000, false, false, ""};

std::vector<VideoStream> streams = {
DefaultVideoStream(screenshare_params_low),
DefaultVideoStream(screenshare_params_high)};
screenshare.ss = {streams, 1, 1, 0, std::vector<SpatialLayer>(), false};
DefaultVideoStream(screenshare_params_low, 0),
DefaultVideoStream(screenshare_params_high, 0)};
screenshare.ss[0] = {streams, 1, 1, 0, std::vector<SpatialLayer>(), false};
RunTest(screenshare);
}

TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_Scroll) {
VideoQualityTest::Params config;
config.call.send_side_bwe = true;
config.video = {true, 1850, 1110 / 2, 5, 50000, 200000, 2000000, false,
"VP8", 2, 1, 400000, false, false, ""};
config.screenshare = {true, false, 10, 2};
config.video[0] = {true, 1850, 1110 / 2, 5, 50000,
200000, 2000000, false, "VP8", 2,
1, 400000, false, false, ""};
config.screenshare[0] = {true, false, 10, 2};
config.analyzer = {"screenshare_slides_scrolling", 0.0, 0.0,
kFullStackTestDurationSecs};
RunTest(config);

@@ -484,9 +511,10 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_Scroll) {
TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNet) {
VideoQualityTest::Params screenshare;
screenshare.call.send_side_bwe = true;
screenshare.video = {true, 1850, 1110, 5, 50000, 200000, 2000000, false,
"VP8", 2, 1, 400000, false, false, ""};
screenshare.screenshare = {true, false, 10};
screenshare.video[0] = {true, 1850, 1110, 5, 50000,
200000, 2000000, false, "VP8", 2,
1, 400000, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
screenshare.analyzer = {"screenshare_slides_lossy_net", 0.0, 0.0,
kFullStackTestDurationSecs};
screenshare.pipe.loss_percent = 5;

@@ -498,9 +526,10 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNet) {
TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_VeryLossyNet) {
VideoQualityTest::Params screenshare;
screenshare.call.send_side_bwe = true;
screenshare.video = {true, 1850, 1110, 5, 50000, 200000, 2000000, false,
"VP8", 2, 1, 400000, false, false, ""};
screenshare.screenshare = {true, false, 10};
screenshare.video[0] = {true, 1850, 1110, 5, 50000,
200000, 2000000, false, "VP8", 2,
1, 400000, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
screenshare.analyzer = {"screenshare_slides_very_lossy", 0.0, 0.0,
kFullStackTestDurationSecs};
screenshare.pipe.loss_percent = 10;

@@ -512,9 +541,10 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_VeryLossyNet) {
TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNetRestrictedQueue) {
VideoQualityTest::Params screenshare;
screenshare.call.send_side_bwe = true;
screenshare.video = {true, 1850, 1110, 5, 50000, 200000, 2000000, false,
"VP8", 2, 1, 400000, false, false, ""};
screenshare.screenshare = {true, false, 10};
screenshare.video[0] = {true, 1850, 1110, 5, 50000,
200000, 2000000, false, "VP8", 2,
1, 400000, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
screenshare.analyzer = {"screenshare_slides_lossy_limited", 0.0, 0.0,
kFullStackTestDurationSecs};
screenshare.pipe.loss_percent = 5;

@@ -527,9 +557,10 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNetRestrictedQueue) {
TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_ModeratelyRestricted) {
VideoQualityTest::Params screenshare;
screenshare.call.send_side_bwe = true;
screenshare.video = {true, 1850, 1110, 5, 50000, 200000, 2000000, false,
"VP8", 2, 1, 400000, false, false, ""};
screenshare.screenshare = {true, false, 10};
screenshare.video[0] = {true, 1850, 1110, 5, 50000,
200000, 2000000, false, "VP8", 2,
1, 400000, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
screenshare.analyzer = {"screenshare_slides_moderately_restricted", 0.0, 0.0,
kFullStackTestDurationSecs};
screenshare.pipe.loss_percent = 1;

@@ -544,9 +575,10 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNetRestrictedQueue_ALR) {
test::ScopedFieldTrials field_trial(kAlrProbingExperiment);
VideoQualityTest::Params screenshare;
screenshare.call.send_side_bwe = true;
screenshare.video = {true, 1850, 1110, 5, 50000, 200000, 2000000, false,
"VP8", 2, 1, 400000, false, false, ""};
screenshare.screenshare = {true, false, 10};
screenshare.video[0] = {true, 1850, 1110, 5, 50000,
200000, 2000000, false, "VP8", 2,
1, 400000, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
screenshare.analyzer = {"screenshare_slides_lossy_limited_ALR", 0.0, 0.0,
kFullStackTestDurationSecs};
screenshare.pipe.loss_percent = 5;

@@ -560,9 +592,10 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_ALR) {
test::ScopedFieldTrials field_trial(kAlrProbingExperiment);
VideoQualityTest::Params screenshare;
screenshare.call.send_side_bwe = true;
screenshare.video = {true, 1850, 1110, 5, 50000, 200000, 2000000, false,
"VP8", 2, 1, 400000, false, false, ""};
screenshare.screenshare = {true, false, 10};
screenshare.video[0] = {true, 1850, 1110, 5, 50000,
200000, 2000000, false, "VP8", 2,
1, 400000, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
screenshare.analyzer = {"screenshare_slides_ALR", 0.0, 0.0,
kFullStackTestDurationSecs};
RunTest(screenshare);
@ -572,9 +605,10 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_ModeratelyRestricted_ALR) {
|
||||
test::ScopedFieldTrials field_trial(kAlrProbingExperiment);
|
||||
VideoQualityTest::Params screenshare;
|
||||
screenshare.call.send_side_bwe = true;
|
||||
screenshare.video = {true, 1850, 1110, 5, 50000, 200000, 2000000, false,
|
||||
"VP8", 2, 1, 400000, false, false, ""};
|
||||
screenshare.screenshare = {true, false, 10};
|
||||
screenshare.video[0] = {true, 1850, 1110, 5, 50000,
|
||||
200000, 2000000, false, "VP8", 2,
|
||||
1, 400000, false, false, ""};
|
||||
screenshare.screenshare[0] = {true, false, 10};
|
||||
screenshare.analyzer = {"screenshare_slides_moderately_restricted_ALR", 0.0,
|
||||
0.0, kFullStackTestDurationSecs};
|
||||
screenshare.pipe.loss_percent = 1;
|
||||
@ -589,25 +623,25 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_3TL_Simulcast_ALR) {
|
||||
kAlrProbingExperiment);
|
||||
VideoQualityTest::Params screenshare;
|
||||
screenshare.call.send_side_bwe = true;
|
||||
screenshare.screenshare = {true, false, 10};
|
||||
screenshare.video = {true, 1850, 1110, 5, 800000,
|
||||
2500000, 2500000, false, "VP8", 3,
|
||||
2, 400000, false, false, ""};
|
||||
screenshare.screenshare[0] = {true, false, 10};
|
||||
screenshare.video[0] = {true, 1850, 1110, 5, 800000,
|
||||
2500000, 2500000, false, "VP8", 3,
|
||||
2, 400000, false, false, ""};
|
||||
screenshare.analyzer = {"screenshare_slides_simulcast_alr", 0.0, 0.0,
|
||||
kFullStackTestDurationSecs};
|
||||
VideoQualityTest::Params screenshare_params_high;
|
||||
screenshare_params_high.video = {true, 1850, 1110, 5, 800000,
|
||||
2500000, 2500000, false, "VP8", 3,
|
||||
0, 400000, false, false, ""};
|
||||
screenshare_params_high.video[0] = {true, 1850, 1110, 5, 800000,
|
||||
2500000, 2500000, false, "VP8", 3,
|
||||
0, 400000, false, false, ""};
|
||||
VideoQualityTest::Params screenshare_params_low;
|
||||
screenshare_params_low.video = {true, 1850, 1110, 5, 50000,
|
||||
200000, 2000000, false, "VP8", 2,
|
||||
0, 400000, false, false, ""};
|
||||
screenshare_params_low.video[0] = {true, 1850, 1110, 5, 50000,
|
||||
200000, 2000000, false, "VP8", 2,
|
||||
0, 400000, false, false, ""};
|
||||
|
||||
std::vector<VideoStream> streams = {
|
||||
DefaultVideoStream(screenshare_params_low),
|
||||
DefaultVideoStream(screenshare_params_high)};
|
||||
screenshare.ss = {streams, 1, 1, 0, std::vector<SpatialLayer>(), false};
|
||||
DefaultVideoStream(screenshare_params_low, 0),
|
||||
DefaultVideoStream(screenshare_params_high, 0)};
|
||||
screenshare.ss[0] = {streams, 1, 1, 0, std::vector<SpatialLayer>(), false};
|
||||
RunTest(screenshare);
|
||||
}
|
||||
|
||||
@ -635,45 +669,46 @@ const VideoQualityTest::Params::Video kSimulcastVp8VideoLow = {
|
||||
TEST_F(FullStackTest, ScreenshareSlidesVP9_2SL) {
|
||||
VideoQualityTest::Params screenshare;
|
||||
screenshare.call.send_side_bwe = true;
|
||||
screenshare.video = {true, 1850, 1110, 5, 50000, 200000, 2000000, false,
|
||||
"VP9", 1, 0, 400000, false, false, ""};
|
||||
screenshare.screenshare = {true, false, 10};
|
||||
screenshare.video[0] = {true, 1850, 1110, 5, 50000,
|
||||
200000, 2000000, false, "VP9", 1,
|
||||
0, 400000, false, false, ""};
|
||||
screenshare.screenshare[0] = {true, false, 10};
|
||||
screenshare.analyzer = {"screenshare_slides_vp9_2sl", 0.0, 0.0,
|
||||
kFullStackTestDurationSecs};
|
||||
screenshare.ss = {std::vector<VideoStream>(), 0, 2, 1,
|
||||
std::vector<SpatialLayer>(), false};
|
||||
screenshare.ss[0] = {std::vector<VideoStream>(), 0, 2, 1,
|
||||
std::vector<SpatialLayer>(), false};
|
||||
RunTest(screenshare);
|
||||
}
|
||||
|
||||
TEST_F(FullStackTest, VP9SVC_3SL_High) {
|
||||
VideoQualityTest::Params simulcast;
|
||||
simulcast.call.send_side_bwe = true;
|
||||
simulcast.video = kSvcVp9Video;
|
||||
simulcast.video[0] = kSvcVp9Video;
|
||||
simulcast.analyzer = {"vp9svc_3sl_high", 0.0, 0.0,
|
||||
kFullStackTestDurationSecs};
|
||||
simulcast.ss = {std::vector<VideoStream>(), 0, 3, 2,
|
||||
std::vector<SpatialLayer>(), false};
|
||||
simulcast.ss[0] = {std::vector<VideoStream>(), 0, 3, 2,
|
||||
std::vector<SpatialLayer>(), false};
|
||||
RunTest(simulcast);
|
||||
}
|
||||
|
||||
TEST_F(FullStackTest, VP9SVC_3SL_Medium) {
|
||||
VideoQualityTest::Params simulcast;
|
||||
simulcast.call.send_side_bwe = true;
|
||||
simulcast.video = kSvcVp9Video;
|
||||
simulcast.video[0] = kSvcVp9Video;
|
||||
simulcast.analyzer = {"vp9svc_3sl_medium", 0.0, 0.0,
|
||||
kFullStackTestDurationSecs};
|
||||
simulcast.ss = {std::vector<VideoStream>(), 0, 3, 1,
|
||||
std::vector<SpatialLayer>(), false};
|
||||
simulcast.ss[0] = {std::vector<VideoStream>(), 0, 3, 1,
|
||||
std::vector<SpatialLayer>(), false};
|
||||
RunTest(simulcast);
|
||||
}
|
||||
|
||||
TEST_F(FullStackTest, VP9SVC_3SL_Low) {
|
||||
VideoQualityTest::Params simulcast;
|
||||
simulcast.call.send_side_bwe = true;
|
||||
simulcast.video = kSvcVp9Video;
|
||||
simulcast.video[0] = kSvcVp9Video;
|
||||
simulcast.analyzer = {"vp9svc_3sl_low", 0.0, 0.0, kFullStackTestDurationSecs};
|
||||
simulcast.ss = {std::vector<VideoStream>(), 0, 3, 0,
|
||||
std::vector<SpatialLayer>(), false};
|
||||
simulcast.ss[0] = {std::vector<VideoStream>(), 0, 3, 0,
|
||||
std::vector<SpatialLayer>(), false};
|
||||
RunTest(simulcast);
|
||||
}
|
||||
#endif // !defined(RTC_DISABLE_VP9)
|
||||
@ -688,17 +723,17 @@ TEST_F(FullStackTest, VP9SVC_3SL_Low) {
|
||||
TEST_F(FullStackTest, MAYBE_SimulcastFullHdOveruse) {
|
||||
VideoQualityTest::Params simulcast;
|
||||
simulcast.call.send_side_bwe = true;
|
||||
simulcast.video = {true, 1920, 1080, 30, 800000,
|
||||
2500000, 2500000, false, "VP8", 3,
|
||||
2, 400000, false, false, "Generator"};
|
||||
simulcast.video[0] = {true, 1920, 1080, 30, 800000,
|
||||
2500000, 2500000, false, "VP8", 3,
|
||||
2, 400000, false, false, "Generator"};
|
||||
simulcast.analyzer = {"simulcast_HD_high", 0.0, 0.0,
|
||||
kFullStackTestDurationSecs};
|
||||
simulcast.pipe.loss_percent = 0;
|
||||
simulcast.pipe.queue_delay_ms = 100;
|
||||
std::vector<VideoStream> streams = {DefaultVideoStream(simulcast),
|
||||
DefaultVideoStream(simulcast),
|
||||
DefaultVideoStream(simulcast)};
|
||||
simulcast.ss = {streams, 2, 1, 0, std::vector<SpatialLayer>(), true};
|
||||
std::vector<VideoStream> streams = {DefaultVideoStream(simulcast, 0),
|
||||
DefaultVideoStream(simulcast, 0),
|
||||
DefaultVideoStream(simulcast, 0)};
|
||||
simulcast.ss[0] = {streams, 2, 1, 0, std::vector<SpatialLayer>(), true};
|
||||
webrtc::test::ScopedFieldTrials override_trials(
|
||||
"WebRTC-ForceSimulatedOveruseIntervalMs/1000-50000-300/");
|
||||
RunTest(simulcast);
|
||||
@ -707,89 +742,93 @@ TEST_F(FullStackTest, MAYBE_SimulcastFullHdOveruse) {
TEST_F(FullStackTest, SimulcastVP8_3SL_High) {
  VideoQualityTest::Params simulcast;
  simulcast.call.send_side_bwe = true;
  simulcast.video = kSimulcastVp8VideoHigh;
  simulcast.video[0] = kSimulcastVp8VideoHigh;
  simulcast.analyzer = {"simulcast_vp8_3sl_high", 0.0, 0.0,
                        kFullStackTestDurationSecs};
  simulcast.pipe.loss_percent = 0;
  simulcast.pipe.queue_delay_ms = 100;
  VideoQualityTest::Params video_params_high;
  video_params_high.video = kSimulcastVp8VideoHigh;
  video_params_high.video[0] = kSimulcastVp8VideoHigh;
  VideoQualityTest::Params video_params_medium;
  video_params_medium.video = kSimulcastVp8VideoMedium;
  video_params_medium.video[0] = kSimulcastVp8VideoMedium;
  VideoQualityTest::Params video_params_low;
  video_params_low.video = kSimulcastVp8VideoLow;
  video_params_low.video[0] = kSimulcastVp8VideoLow;

  std::vector<VideoStream> streams = {DefaultVideoStream(video_params_low),
                                      DefaultVideoStream(video_params_medium),
                                      DefaultVideoStream(video_params_high)};
  simulcast.ss = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
  std::vector<VideoStream> streams = {
      DefaultVideoStream(video_params_low, 0),
      DefaultVideoStream(video_params_medium, 0),
      DefaultVideoStream(video_params_high, 0)};
  simulcast.ss[0] = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
  RunTest(simulcast);
}
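The 15-value video initializers in these tests (for example the 1920x1080 one in MAYBE_SimulcastFullHdOveruse above) are positional. A hedged reading, assuming the field order suggested by the Params::Video struct in video_quality_test.h and by the flag-driven construction in sv_loopback.cc below:

  // Sketch, not part of the commit: annotated form of the same initializer.
  simulcast.video[0] = {true,        // enabled
                        1920, 1080,  // width, height
                        30,          // fps
                        800000,      // min_bitrate_bps
                        2500000,     // target_bitrate_bps
                        2500000,     // max_bitrate_bps
                        false,       // suspend_below_min_bitrate
                        "VP8",       // codec
                        3,           // num_temporal_layers
                        2,           // selected_tl
                        400000,      // min_transmit_bps
                        false,       // ulpfec
                        false,       // flexfec
                        "Generator"};  // clip_name ("Generator" = synthetic frames)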
TEST_F(FullStackTest, SimulcastVP8_3SL_Medium) {
  VideoQualityTest::Params simulcast;
  simulcast.call.send_side_bwe = true;
  simulcast.video = kSimulcastVp8VideoHigh;
  simulcast.video[0] = kSimulcastVp8VideoHigh;
  simulcast.analyzer = {"simulcast_vp8_3sl_medium", 0.0, 0.0,
                        kFullStackTestDurationSecs};
  simulcast.pipe.loss_percent = 0;
  simulcast.pipe.queue_delay_ms = 100;
  VideoQualityTest::Params video_params_high;
  video_params_high.video = kSimulcastVp8VideoHigh;
  video_params_high.video[0] = kSimulcastVp8VideoHigh;
  VideoQualityTest::Params video_params_medium;
  video_params_medium.video = kSimulcastVp8VideoMedium;
  video_params_medium.video[0] = kSimulcastVp8VideoMedium;
  VideoQualityTest::Params video_params_low;
  video_params_low.video = kSimulcastVp8VideoLow;
  video_params_low.video[0] = kSimulcastVp8VideoLow;

  std::vector<VideoStream> streams = {DefaultVideoStream(video_params_low),
                                      DefaultVideoStream(video_params_medium),
                                      DefaultVideoStream(video_params_high)};
  simulcast.ss = {streams, 1, 1, 0, std::vector<SpatialLayer>(), false};
  std::vector<VideoStream> streams = {
      DefaultVideoStream(video_params_low, 0),
      DefaultVideoStream(video_params_medium, 0),
      DefaultVideoStream(video_params_high, 0)};
  simulcast.ss[0] = {streams, 1, 1, 0, std::vector<SpatialLayer>(), false};
  RunTest(simulcast);
}

TEST_F(FullStackTest, SimulcastVP8_3SL_Low) {
  VideoQualityTest::Params simulcast;
  simulcast.call.send_side_bwe = true;
  simulcast.video = kSimulcastVp8VideoHigh;
  simulcast.video[0] = kSimulcastVp8VideoHigh;
  simulcast.analyzer = {"simulcast_vp8_3sl_low", 0.0, 0.0,
                        kFullStackTestDurationSecs};
  simulcast.pipe.loss_percent = 0;
  simulcast.pipe.queue_delay_ms = 100;
  VideoQualityTest::Params video_params_high;
  video_params_high.video = kSimulcastVp8VideoHigh;
  video_params_high.video[0] = kSimulcastVp8VideoHigh;
  VideoQualityTest::Params video_params_medium;
  video_params_medium.video = kSimulcastVp8VideoMedium;
  video_params_medium.video[0] = kSimulcastVp8VideoMedium;
  VideoQualityTest::Params video_params_low;
  video_params_low.video = kSimulcastVp8VideoLow;
  video_params_low.video[0] = kSimulcastVp8VideoLow;

  std::vector<VideoStream> streams = {DefaultVideoStream(video_params_low),
                                      DefaultVideoStream(video_params_medium),
                                      DefaultVideoStream(video_params_high)};
  simulcast.ss = {streams, 0, 1, 0, std::vector<SpatialLayer>(), false};
  std::vector<VideoStream> streams = {
      DefaultVideoStream(video_params_low, 0),
      DefaultVideoStream(video_params_medium, 0),
      DefaultVideoStream(video_params_high, 0)};
  simulcast.ss[0] = {streams, 0, 1, 0, std::vector<SpatialLayer>(), false};
  RunTest(simulcast);
}

TEST_F(FullStackTest, LargeRoomVP8_5thumb) {
  VideoQualityTest::Params large_room;
  large_room.call.send_side_bwe = true;
  large_room.video = kSimulcastVp8VideoHigh;
  large_room.video[0] = kSimulcastVp8VideoHigh;
  large_room.analyzer = {"largeroom_5thumb", 0.0, 0.0,
                         kFullStackTestDurationSecs};
  large_room.pipe.loss_percent = 0;
  large_room.pipe.queue_delay_ms = 100;
  VideoQualityTest::Params video_params_high;
  video_params_high.video = kSimulcastVp8VideoHigh;
  video_params_high.video[0] = kSimulcastVp8VideoHigh;
  VideoQualityTest::Params video_params_medium;
  video_params_medium.video = kSimulcastVp8VideoMedium;
  video_params_medium.video[0] = kSimulcastVp8VideoMedium;
  VideoQualityTest::Params video_params_low;
  video_params_low.video = kSimulcastVp8VideoLow;
  video_params_low.video[0] = kSimulcastVp8VideoLow;

  std::vector<VideoStream> streams = {DefaultVideoStream(video_params_low),
                                      DefaultVideoStream(video_params_medium),
                                      DefaultVideoStream(video_params_high)};
  std::vector<VideoStream> streams = {
      DefaultVideoStream(video_params_low, 0),
      DefaultVideoStream(video_params_medium, 0),
      DefaultVideoStream(video_params_high, 0)};
  large_room.call.num_thumbnails = 5;
  large_room.ss = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
  large_room.ss[0] = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
  RunTest(large_room);
}
@ -806,48 +845,148 @@ TEST_F(FullStackTest, LargeRoomVP8_5thumb) {
TEST_F(FullStackTest, MAYBE_LargeRoomVP8_15thumb) {
  VideoQualityTest::Params large_room;
  large_room.call.send_side_bwe = true;
  large_room.video = kSimulcastVp8VideoHigh;
  large_room.video[0] = kSimulcastVp8VideoHigh;
  large_room.analyzer = {"largeroom_15thumb", 0.0, 0.0,
                         kFullStackTestDurationSecs};
  large_room.pipe.loss_percent = 0;
  large_room.pipe.queue_delay_ms = 100;
  VideoQualityTest::Params video_params_high;
  video_params_high.video = kSimulcastVp8VideoHigh;
  video_params_high.video[0] = kSimulcastVp8VideoHigh;
  VideoQualityTest::Params video_params_medium;
  video_params_medium.video = kSimulcastVp8VideoMedium;
  video_params_medium.video[0] = kSimulcastVp8VideoMedium;
  VideoQualityTest::Params video_params_low;
  video_params_low.video = kSimulcastVp8VideoLow;
  video_params_low.video[0] = kSimulcastVp8VideoLow;

  std::vector<VideoStream> streams = {DefaultVideoStream(video_params_low),
                                      DefaultVideoStream(video_params_medium),
                                      DefaultVideoStream(video_params_high)};
  std::vector<VideoStream> streams = {
      DefaultVideoStream(video_params_low, 0),
      DefaultVideoStream(video_params_medium, 0),
      DefaultVideoStream(video_params_high, 0)};
  large_room.call.num_thumbnails = 15;
  large_room.ss = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
  large_room.ss[0] = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
  RunTest(large_room);
}

TEST_F(FullStackTest, MAYBE_LargeRoomVP8_50thumb) {
  VideoQualityTest::Params large_room;
  large_room.call.send_side_bwe = true;
  large_room.video = kSimulcastVp8VideoHigh;
  large_room.video[0] = kSimulcastVp8VideoHigh;
  large_room.analyzer = {"largeroom_50thumb", 0.0, 0.0,
                         kFullStackTestDurationSecs};
  large_room.pipe.loss_percent = 0;
  large_room.pipe.queue_delay_ms = 100;
  VideoQualityTest::Params video_params_high;
  video_params_high.video = kSimulcastVp8VideoHigh;
  video_params_high.video[0] = kSimulcastVp8VideoHigh;
  VideoQualityTest::Params video_params_medium;
  video_params_medium.video = kSimulcastVp8VideoMedium;
  video_params_medium.video[0] = kSimulcastVp8VideoMedium;
  VideoQualityTest::Params video_params_low;
  video_params_low.video = kSimulcastVp8VideoLow;
  video_params_low.video[0] = kSimulcastVp8VideoLow;

  std::vector<VideoStream> streams = {DefaultVideoStream(video_params_low),
                                      DefaultVideoStream(video_params_medium),
                                      DefaultVideoStream(video_params_high)};
  std::vector<VideoStream> streams = {
      DefaultVideoStream(video_params_low, 0),
      DefaultVideoStream(video_params_medium, 0),
      DefaultVideoStream(video_params_high, 0)};
  large_room.call.num_thumbnails = 50;
  large_room.ss = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
  large_room.ss[0] = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
  RunTest(large_room);
}

class DualStreamsTest : public FullStackTest,
                        public ::testing::WithParamInterface<int> {};

// Disable dual video test on mobile devices because it's too heavy.
#if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS)
TEST_P(DualStreamsTest,
       ModeratelyRestricted_SlidesVp8_3TL_Simulcast_Video_Simulcast_High) {
  test::ScopedFieldTrials field_trial(
      kScreenshareSimulcastExperiment + kAlrProbingExperiment +
      kRoundRobinPacingQueueExperiment + kPacerPushBackExperiment);
  const int first_stream = GetParam();
  VideoQualityTest::Params dual_streams;

  // Screenshare Settings.
  dual_streams.screenshare[first_stream] = {true, false, 10};
  dual_streams.video[first_stream] = {true,    1850, 1110, 5, 800000,
                                      2500000, 2500000, false, "VP8", 3,
                                      2,       400000, false, false, ""};

  VideoQualityTest::Params screenshare_params_high;
  screenshare_params_high.video[0] = {true,    1850, 1110, 5, 800000,
                                      2500000, 2500000, false, "VP8", 3,
                                      0,       400000, false, false, ""};
  VideoQualityTest::Params screenshare_params_low;
  screenshare_params_low.video[0] = {true,   1850, 1110, 5, 50000,
                                     200000, 2000000, false, "VP8", 2,
                                     0,      400000, false, false, ""};
  std::vector<VideoStream> screenshare_streams = {
      DefaultVideoStream(screenshare_params_low, 0),
      DefaultVideoStream(screenshare_params_high, 0)};

  dual_streams.ss[first_stream] = {screenshare_streams, 1, 1, 0,
                                   std::vector<SpatialLayer>(), false};

  // Video settings.
  dual_streams.video[1 - first_stream] = kSimulcastVp8VideoHigh;

  VideoQualityTest::Params video_params_high;
  video_params_high.video[0] = kSimulcastVp8VideoHigh;
  VideoQualityTest::Params video_params_medium;
  video_params_medium.video[0] = kSimulcastVp8VideoMedium;
  VideoQualityTest::Params video_params_low;
  video_params_low.video[0] = kSimulcastVp8VideoLow;
  std::vector<VideoStream> streams = {
      DefaultVideoStream(video_params_low, 0),
      DefaultVideoStream(video_params_medium, 0),
      DefaultVideoStream(video_params_high, 0)};

  dual_streams.ss[1 - first_stream] = {
      streams, 2, 1, 0, std::vector<SpatialLayer>(), false};

  // Call settings.
  dual_streams.call.send_side_bwe = true;
  dual_streams.call.dual_video = true;
  dual_streams.analyzer = {"dualstreams_moderately_restricted_screenshare", 0.0,
                           0.0, kFullStackTestDurationSecs};
  dual_streams.pipe.loss_percent = 1;
  dual_streams.pipe.link_capacity_kbps = 7500;
  dual_streams.pipe.queue_length_packets = 30;
  dual_streams.pipe.queue_delay_ms = 100;

  RunTest(dual_streams);
}
#endif  // !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS)

TEST_P(DualStreamsTest, Conference_Restricted) {
  test::ScopedFieldTrials field_trial(kRoundRobinPacingQueueExperiment +
                                      kPacerPushBackExperiment);
  const int first_stream = GetParam();
  VideoQualityTest::Params dual_streams;

  // Screenshare Settings.
  dual_streams.screenshare[first_stream] = {true, false, 10};
  dual_streams.video[first_stream] = {true,    1850, 1110, 5, 800000,
                                      2500000, 2500000, false, "VP8", 3,
                                      2,       400000, false, false, ""};
  // Video settings.
  dual_streams.video[1 - first_stream] = {
      true,   1280, 720, 30, 150000,
      500000, 700000, false, "VP8", 3,
      2,      400000, false, false, "ConferenceMotion_1280_720_50"};

  // Call settings.
  dual_streams.call.send_side_bwe = true;
  dual_streams.call.dual_video = true;
  dual_streams.analyzer = {"dualstreams_moderately_restricted_screenshare", 0.0,
                           0.0, kFullStackTestDurationSecs};
  dual_streams.pipe.loss_percent = 1;
  dual_streams.pipe.link_capacity_kbps = 5000;
  dual_streams.pipe.queue_length_packets = 30;
  dual_streams.pipe.queue_delay_ms = 100;

  RunTest(dual_streams);
}

INSTANTIATE_TEST_CASE_P(FullStackTest,
                        DualStreamsTest,
                        ::testing::Values(0, 1));
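The test-suite parameter selects which of the two stream slots carries the screenshare content. A compact restatement of the GetParam() use in the two tests above, for orientation only:

  // first_stream = GetParam();
  // GetParam() == 0: screenshare uses video[0]/ss[0], the simulcast camera stream uses video[1]/ss[1].
  // GetParam() == 1: the roles of the two indices are swapped.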

}  // namespace webrtc
@ -317,7 +317,7 @@ void PictureIdTest::SetupEncoder(VideoEncoder* encoder,
        test::PacketTransport::kSender, payload_type_map_,
        FakeNetworkPipe::Config()));

    CreateSendConfig(kNumSsrcs, 0, 0, send_transport_.get());
    CreateSendConfig(kNumSimulcastStreams, 0, 0, send_transport_.get());
    video_send_config_.encoder_settings.encoder = encoder;
    video_send_config_.encoder_settings.payload_name = payload_name;
    video_encoder_config_.video_stream_factory =
@ -273,24 +273,24 @@ void Loopback() {

  VideoQualityTest::Params params;
  params.call = {flags::FLAG_send_side_bwe, call_bitrate_config};
  params.video = {true,
                  flags::Width(),
                  flags::Height(),
                  flags::Fps(),
                  flags::MinBitrateKbps() * 1000,
                  flags::TargetBitrateKbps() * 1000,
                  flags::MaxBitrateKbps() * 1000,
                  false,
                  flags::Codec(),
                  flags::NumTemporalLayers(),
                  flags::SelectedTL(),
                  flags::MinTransmitBitrateKbps() * 1000,
                  false,  // ULPFEC disabled.
                  false,  // FlexFEC disabled.
                  ""};
  params.screenshare = {true, flags::GenerateSlides(),
                        flags::SlideChangeInterval(),
                        flags::ScrollDuration(), flags::Slides()};
  params.video[0] = {true,
                     flags::Width(),
                     flags::Height(),
                     flags::Fps(),
                     flags::MinBitrateKbps() * 1000,
                     flags::TargetBitrateKbps() * 1000,
                     flags::MaxBitrateKbps() * 1000,
                     false,
                     flags::Codec(),
                     flags::NumTemporalLayers(),
                     flags::SelectedTL(),
                     flags::MinTransmitBitrateKbps() * 1000,
                     false,  // ULPFEC disabled.
                     false,  // FlexFEC disabled.
                     ""};
  params.screenshare[0] = {true, flags::GenerateSlides(),
                           flags::SlideChangeInterval(),
                           flags::ScrollDuration(), flags::Slides()};
  params.analyzer = {"screenshare", 0.0, 0.0, flags::DurationSecs(),
                     flags::OutputFilename(), flags::GraphTitle()};
  params.pipe = pipe_config;
@ -299,7 +299,7 @@ void Loopback() {

  if (flags::NumStreams() > 1 && flags::Stream0().empty() &&
      flags::Stream1().empty()) {
    params.ss.infer_streams = true;
    params.ss[0].infer_streams = true;
  }

  std::vector<std::string> stream_descriptors;
@ -309,8 +309,9 @@ void Loopback() {
  SL_descriptors.push_back(flags::SL0());
  SL_descriptors.push_back(flags::SL1());
  VideoQualityTest::FillScalabilitySettings(
      &params, stream_descriptors, flags::NumStreams(), flags::SelectedStream(),
      flags::NumSpatialLayers(), flags::SelectedSL(), SL_descriptors);
      &params, 0, stream_descriptors, flags::NumStreams(),
      flags::SelectedStream(), flags::NumSpatialLayers(), flags::SelectedSL(),
      SL_descriptors);
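As the hunk above shows, FillScalabilitySettings() now also takes the index of the video stream whose scalability settings are being filled; the single-stream loopback tools simply pass 0. A minimal sketch of the new call shape, using only the names visible in this diff:

  VideoQualityTest::FillScalabilitySettings(
      &params, /*video_idx=*/0, stream_descriptors, flags::NumStreams(),
      flags::SelectedStream(), flags::NumSpatialLayers(), flags::SelectedSL(),
      SL_descriptors);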

  VideoQualityTest test;
  if (flags::DurationSecs()) {
578
video/sv_loopback.cc
Normal file
@ -0,0 +1,578 @@
/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <stdio.h>

#include "rtc_base/flags.h"
#include "rtc_base/stringencode.h"
#include "test/field_trial.h"
#include "test/gtest.h"
#include "test/run_test.h"
#include "video/video_quality_test.h"

namespace webrtc {
namespace flags {

// Flags for video.
DEFINE_int(vwidth, 640, "Video width.");
size_t VideoWidth() {
  return static_cast<size_t>(FLAG_vwidth);
}

DEFINE_int(vheight, 480, "Video height.");
size_t VideoHeight() {
  return static_cast<size_t>(FLAG_vheight);
}

DEFINE_int(vfps, 30, "Video frames per second.");
int VideoFps() {
  return static_cast<int>(FLAG_vfps);
}

DEFINE_int(capture_device_index,
           0,
           "Capture device to select for video stream");
size_t GetCaptureDevice() {
  return static_cast<size_t>(FLAG_capture_device_index);
}

DEFINE_int(vtarget_bitrate, 400, "Video stream target bitrate in kbps.");
int VideoTargetBitrateKbps() {
  return static_cast<int>(FLAG_vtarget_bitrate);
}

DEFINE_int(vmin_bitrate, 100, "Video stream min bitrate in kbps.");
int VideoMinBitrateKbps() {
  return static_cast<int>(FLAG_vmin_bitrate);
}

DEFINE_int(vmax_bitrate, 2000, "Video stream max bitrate in kbps.");
int VideoMaxBitrateKbps() {
  return static_cast<int>(FLAG_vmax_bitrate);
}

DEFINE_bool(suspend_below_min_bitrate,
            false,
            "Suspends video below the configured min bitrate.");

DEFINE_int(vnum_temporal_layers,
           1,
           "Number of temporal layers for video. Set to 1-4 to override.");
int VideoNumTemporalLayers() {
  return static_cast<int>(FLAG_vnum_temporal_layers);
}

DEFINE_int(vnum_streams, 0, "Number of video streams to show or analyze.");
int VideoNumStreams() {
  return static_cast<int>(FLAG_vnum_streams);
}

DEFINE_int(vnum_spatial_layers, 1, "Number of video spatial layers to use.");
int VideoNumSpatialLayers() {
  return static_cast<int>(FLAG_vnum_spatial_layers);
}

DEFINE_string(
    vstream0,
    "",
    "Comma separated values describing VideoStream for video stream #0.");
std::string VideoStream0() {
  return static_cast<std::string>(FLAG_vstream0);
}

DEFINE_string(
    vstream1,
    "",
    "Comma separated values describing VideoStream for video stream #1.");
std::string VideoStream1() {
  return static_cast<std::string>(FLAG_vstream1);
}

DEFINE_string(
    vsl0,
    "",
    "Comma separated values describing SpatialLayer for video layer #0.");
std::string VideoSL0() {
  return static_cast<std::string>(FLAG_vsl0);
}

DEFINE_string(
    vsl1,
    "",
    "Comma separated values describing SpatialLayer for video layer #1.");
std::string VideoSL1() {
  return static_cast<std::string>(FLAG_vsl1);
}

DEFINE_int(vselected_tl,
           -1,
           "Temporal layer to show or analyze for video. -1 to disable "
           "filtering.");
int VideoSelectedTL() {
  return static_cast<int>(FLAG_vselected_tl);
}

DEFINE_int(vselected_stream,
           0,
           "ID of the stream to show or analyze for video. "
           "Set to the number of streams to show them all.");
int VideoSelectedStream() {
  return static_cast<int>(FLAG_vselected_stream);
}

DEFINE_int(vselected_sl,
           -1,
           "Spatial layer to show or analyze for video. -1 to disable "
           "filtering.");
int VideoSelectedSL() {
  return static_cast<int>(FLAG_vselected_sl);
}

// Flags for screenshare.
DEFINE_int(min_transmit_bitrate,
           400,
           "Min transmit bitrate incl. padding for screenshare.");
int ScreenshareMinTransmitBitrateKbps() {
  return FLAG_min_transmit_bitrate;
}

DEFINE_int(swidth, 1850, "Screenshare width (crops source).");
size_t ScreenshareWidth() {
  return static_cast<size_t>(FLAG_swidth);
}

DEFINE_int(sheight, 1110, "Screenshare height (crops source).");
size_t ScreenshareHeight() {
  return static_cast<size_t>(FLAG_sheight);
}

DEFINE_int(sfps, 5, "Frames per second for screenshare.");
int ScreenshareFps() {
  return static_cast<int>(FLAG_sfps);
}

DEFINE_int(starget_bitrate, 100, "Screenshare stream target bitrate in kbps.");
int ScreenshareTargetBitrateKbps() {
  return static_cast<int>(FLAG_starget_bitrate);
}

DEFINE_int(smin_bitrate, 100, "Screenshare stream min bitrate in kbps.");
int ScreenshareMinBitrateKbps() {
  return static_cast<int>(FLAG_smin_bitrate);
}

DEFINE_int(smax_bitrate, 2000, "Screenshare stream max bitrate in kbps.");
int ScreenshareMaxBitrateKbps() {
  return static_cast<int>(FLAG_smax_bitrate);
}

DEFINE_int(snum_temporal_layers,
           2,
           "Number of temporal layers to use in screenshare.");
int ScreenshareNumTemporalLayers() {
  return static_cast<int>(FLAG_snum_temporal_layers);
}

DEFINE_int(snum_streams,
           0,
           "Number of screenshare streams to show or analyze.");
int ScreenshareNumStreams() {
  return static_cast<int>(FLAG_snum_streams);
}

DEFINE_int(snum_spatial_layers,
           1,
"Number of screemshare spatial layers to use.");
int ScreenshareNumSpatialLayers() {
  return static_cast<int>(FLAG_snum_spatial_layers);
}

DEFINE_string(
    sstream0,
    "",
    "Comma separated values describing VideoStream for screenshare stream #0.");
std::string ScreenshareStream0() {
  return static_cast<std::string>(FLAG_sstream0);
}

DEFINE_string(
    sstream1,
    "",
    "Comma separated values describing VideoStream for screenshare stream #1.");
std::string ScreenshareStream1() {
  return static_cast<std::string>(FLAG_sstream1);
}

DEFINE_string(
    ssl0,
    "",
    "Comma separated values describing SpatialLayer for screenshare layer #0.");
std::string ScreenshareSL0() {
  return static_cast<std::string>(FLAG_ssl0);
}

DEFINE_string(
    ssl1,
    "",
    "Comma separated values describing SpatialLayer for screenshare layer #1.");
std::string ScreenshareSL1() {
  return static_cast<std::string>(FLAG_ssl1);
}

DEFINE_int(sselected_tl,
           -1,
           "Temporal layer to show or analyze for screenshare. -1 to disable "
           "filtering.");
int ScreenshareSelectedTL() {
  return static_cast<int>(FLAG_sselected_tl);
}

DEFINE_int(sselected_stream,
           0,
"ID of the stream to show or analyze for screenshare."
|
||||
"Set to the number of streams to show them all.");
int ScreenshareSelectedStream() {
  return static_cast<int>(FLAG_sselected_stream);
}

DEFINE_int(sselected_sl,
           -1,
           "Spatial layer to show or analyze for screenshare. -1 to disable "
           "filtering.");
int ScreenshareSelectedSL() {
  return static_cast<int>(FLAG_sselected_sl);
}

DEFINE_bool(
    generate_slides,
    false,
    "Whether to use randomly generated slides or read them from files.");
bool GenerateSlides() {
  return static_cast<bool>(FLAG_generate_slides);
}

DEFINE_int(slide_change_interval,
           10,
           "Interval (in seconds) between simulated slide changes.");
int SlideChangeInterval() {
  return static_cast<int>(FLAG_slide_change_interval);
}

DEFINE_int(
    scroll_duration,
    0,
    "Duration (in seconds) during which a slide will be scrolled into place.");
int ScrollDuration() {
  return static_cast<int>(FLAG_scroll_duration);
}

DEFINE_string(slides,
              "",
              "Comma-separated list of *.yuv files to display as slides.");
std::vector<std::string> Slides() {
  std::vector<std::string> slides;
  std::string slides_list = FLAG_slides;
  rtc::tokenize(slides_list, ',', &slides);
  return slides;
}

// Flags common with screenshare and video loopback, with equal default values.
DEFINE_int(start_bitrate, 600, "Call start bitrate in kbps.");
int StartBitrateKbps() {
  return static_cast<int>(FLAG_start_bitrate);
}

DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
  return static_cast<std::string>(FLAG_codec);
}

DEFINE_bool(analyze_video,
            false,
            "Analyze video stream (if --duration is present)");
bool AnalyzeVideo() {
  return static_cast<bool>(FLAG_analyze_video);
}

DEFINE_bool(analyze_screenshare,
            false,
            "Analyze screenshare stream (if --duration is present)");
bool AnalyzeScreenshare() {
  return static_cast<bool>(FLAG_analyze_screenshare);
}

DEFINE_int(
    duration,
    0,
"Duration of the test in seconds. If 0, rendered will be shown instead.");
int DurationSecs() {
  return static_cast<int>(FLAG_duration);
}

DEFINE_string(output_filename, "", "Target graph data filename.");
std::string OutputFilename() {
  return static_cast<std::string>(FLAG_output_filename);
}

DEFINE_string(graph_title,
              "",
              "If empty, title will be generated automatically.");
std::string GraphTitle() {
  return static_cast<std::string>(FLAG_graph_title);
}

DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
  return static_cast<int>(FLAG_loss_percent);
}

DEFINE_int(avg_burst_loss_length, -1, "Average burst length of lost packets.");
int AvgBurstLossLength() {
  return static_cast<int>(FLAG_avg_burst_loss_length);
}

DEFINE_int(link_capacity,
           0,
           "Capacity (kbps) of the fake link. 0 means infinite.");
int LinkCapacityKbps() {
  return static_cast<int>(FLAG_link_capacity);
}

DEFINE_int(queue_size, 0, "Size of the bottleneck link queue in packets.");
int QueueSize() {
  return static_cast<int>(FLAG_queue_size);
}

DEFINE_int(avg_propagation_delay_ms,
           0,
           "Average link propagation delay in ms.");
int AvgPropagationDelayMs() {
  return static_cast<int>(FLAG_avg_propagation_delay_ms);
}

DEFINE_string(rtc_event_log_name,
              "",
              "Filename for rtc event log. Two files "
              "with \"_send\" and \"_recv\" suffixes will be created. "
              "Works only when --duration is set.");
std::string RtcEventLogName() {
  return static_cast<std::string>(FLAG_rtc_event_log_name);
}

DEFINE_string(rtp_dump_name, "", "Filename for dumped received RTP stream.");
std::string RtpDumpName() {
  return static_cast<std::string>(FLAG_rtp_dump_name);
}

DEFINE_int(std_propagation_delay_ms,
           0,
           "Link propagation delay standard deviation in ms.");
int StdPropagationDelayMs() {
  return static_cast<int>(FLAG_std_propagation_delay_ms);
}

DEFINE_string(encoded_frame_path,
              "",
              "The base path for encoded frame logs. Created files will have "
              "the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
std::string EncodedFramePath() {
  return static_cast<std::string>(FLAG_encoded_frame_path);
}

DEFINE_bool(logs, false, "print logs to stderr");

DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");

DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");

DEFINE_bool(use_ulpfec, false, "Use RED+ULPFEC forward error correction.");

DEFINE_bool(use_flexfec, false, "Use FlexFEC forward error correction.");

DEFINE_bool(audio, false, "Add audio stream");

DEFINE_bool(audio_video_sync,
            false,
            "Sync audio and video stream (no effect if"
            " audio is false)");

DEFINE_bool(audio_dtx, false, "Enable audio DTX (no effect if audio is false)");

DEFINE_bool(video, true, "Add video stream");

DEFINE_string(
    force_fieldtrials,
    "",
    "Field trials control experimental feature code which can be forced. "
    "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
    " will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
    "trials are separated by \"/\"");

// Video-specific flags.
DEFINE_string(vclip,
              "",
              "Name of the clip to show. If empty, the camera is used. Use "
              "\"Generator\" for chroma generator.");
std::string VideoClip() {
  return static_cast<std::string>(FLAG_vclip);
}

DEFINE_bool(help, false, "prints this message");

}  // namespace flags

void Loopback() {
  int camera_idx, screenshare_idx;
  RTC_CHECK(!(flags::AnalyzeScreenshare() && flags::AnalyzeVideo()))
      << "Select only one of video or screenshare.";
  RTC_CHECK(!flags::DurationSecs() || flags::AnalyzeScreenshare() ||
            flags::AnalyzeVideo())
      << "If duration is set, exactly one of analyze_* flags should be set.";
  // Default: camera feed first, if nothing selected.
  if (flags::AnalyzeVideo() || !flags::AnalyzeScreenshare()) {
    camera_idx = 0;
    screenshare_idx = 1;
  } else {
    camera_idx = 1;
    screenshare_idx = 0;
  }
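In other words, whichever stream is being analyzed (or the camera, by default) ends up in slot 0. A compact restatement of the branch above, for orientation only:

  // camera_idx = 0, screenshare_idx = 1  <- default, or --analyze_video
  // camera_idx = 1, screenshare_idx = 0  <- --analyze_screenshare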

  FakeNetworkPipe::Config pipe_config;
  pipe_config.loss_percent = flags::LossPercent();
  pipe_config.avg_burst_loss_length = flags::AvgBurstLossLength();
  pipe_config.link_capacity_kbps = flags::LinkCapacityKbps();
  pipe_config.queue_length_packets = flags::QueueSize();
  pipe_config.queue_delay_ms = flags::AvgPropagationDelayMs();
  pipe_config.delay_standard_deviation_ms = flags::StdPropagationDelayMs();
  pipe_config.allow_reordering = flags::FLAG_allow_reordering;

  Call::Config::BitrateConfig call_bitrate_config;
  call_bitrate_config.min_bitrate_bps =
      (flags::ScreenshareMinBitrateKbps() + flags::VideoMinBitrateKbps()) *
      1000;
  call_bitrate_config.start_bitrate_bps = flags::StartBitrateKbps() * 1000;
  call_bitrate_config.max_bitrate_bps =
      (flags::ScreenshareMaxBitrateKbps() + flags::VideoMaxBitrateKbps()) *
      1000;
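Because both streams share one call, the call-level bitrate bounds are the sums of the per-stream flags. With the defaults declared above (smin_bitrate=100, vmin_bitrate=100, start_bitrate=600, smax_bitrate=2000, vmax_bitrate=2000) this works out to:

  // min_bitrate_bps   = (100 + 100)   * 1000 =   200000
  // start_bitrate_bps =  600          * 1000 =   600000
  // max_bitrate_bps   = (2000 + 2000) * 1000 = 4000000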

  VideoQualityTest::Params params, camera_params, screenshare_params;
  params.call = {flags::FLAG_send_side_bwe, call_bitrate_config, 0};
  params.call.dual_video = true;
  params.video[screenshare_idx] = {
      true,
      flags::ScreenshareWidth(),
      flags::ScreenshareHeight(),
      flags::ScreenshareFps(),
      flags::ScreenshareMinBitrateKbps() * 1000,
      flags::ScreenshareTargetBitrateKbps() * 1000,
      flags::ScreenshareMaxBitrateKbps() * 1000,
      false,
      flags::Codec(),
      flags::ScreenshareNumTemporalLayers(),
      flags::ScreenshareSelectedTL(),
      flags::ScreenshareMinTransmitBitrateKbps() * 1000,
      false,  // ULPFEC disabled.
      false,  // FlexFEC disabled.
      ""};
  params.video[camera_idx] = {flags::FLAG_video,
                              flags::VideoWidth(),
                              flags::VideoHeight(),
                              flags::VideoFps(),
                              flags::VideoMinBitrateKbps() * 1000,
                              flags::VideoTargetBitrateKbps() * 1000,
                              flags::VideoMaxBitrateKbps() * 1000,
                              flags::FLAG_suspend_below_min_bitrate,
                              flags::Codec(),
                              flags::VideoNumTemporalLayers(),
                              flags::VideoSelectedTL(),
                              0,  // No min transmit bitrate.
                              flags::FLAG_use_ulpfec,
                              flags::FLAG_use_flexfec,
                              flags::VideoClip(),
                              flags::GetCaptureDevice()};
  params.audio = {flags::FLAG_audio, flags::FLAG_audio_video_sync,
                  flags::FLAG_audio_dtx};
  params.logging = {flags::FLAG_logs, flags::FLAG_rtc_event_log_name,
                    flags::FLAG_rtp_dump_name, flags::FLAG_encoded_frame_path};
  params.analyzer = {"dual_streams",
                     0.0,
                     0.0,
                     flags::DurationSecs(),
                     flags::OutputFilename(),
                     flags::GraphTitle()};
  params.pipe = pipe_config;

  params.screenshare[camera_idx].enabled = false;
  params.screenshare[screenshare_idx] = {
      true, flags::GenerateSlides(), flags::SlideChangeInterval(),
      flags::ScrollDuration(), flags::Slides()};

  if (flags::VideoNumStreams() > 1 && flags::VideoStream0().empty() &&
      flags::VideoStream1().empty()) {
    params.ss[camera_idx].infer_streams = true;
  }

  if (flags::ScreenshareNumStreams() > 1 &&
      flags::ScreenshareStream0().empty() &&
      flags::ScreenshareStream1().empty()) {
    params.ss[screenshare_idx].infer_streams = true;
  }

  std::vector<std::string> stream_descriptors;
  stream_descriptors.push_back(flags::ScreenshareStream0());
  stream_descriptors.push_back(flags::ScreenshareStream1());
  std::vector<std::string> SL_descriptors;
  SL_descriptors.push_back(flags::ScreenshareSL0());
  SL_descriptors.push_back(flags::ScreenshareSL1());
  VideoQualityTest::FillScalabilitySettings(
      &params, screenshare_idx, stream_descriptors,
      flags::ScreenshareNumStreams(), flags::ScreenshareSelectedStream(),
      flags::ScreenshareNumSpatialLayers(), flags::ScreenshareSelectedSL(),
      SL_descriptors);

  stream_descriptors.clear();
  stream_descriptors.push_back(flags::VideoStream0());
  stream_descriptors.push_back(flags::VideoStream1());
  SL_descriptors.clear();
  SL_descriptors.push_back(flags::VideoSL0());
  SL_descriptors.push_back(flags::VideoSL1());
  VideoQualityTest::FillScalabilitySettings(
      &params, camera_idx, stream_descriptors, flags::VideoNumStreams(),
      flags::VideoSelectedStream(), flags::VideoNumSpatialLayers(),
      flags::VideoSelectedSL(), SL_descriptors);

  VideoQualityTest test;
  if (flags::DurationSecs()) {
    test.RunWithAnalyzer(params);
  } else {
    test.RunWithRenderers(params);
  }
}
}  // namespace webrtc

int main(int argc, char* argv[]) {
  ::testing::InitGoogleTest(&argc, argv);
  if (rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true) != 0) {
    // Fail on unrecognized flags.
    return 1;
  }
  if (webrtc::flags::FLAG_help) {
    rtc::FlagList::Print(nullptr, false);
    return 0;
  }

  // InitFieldTrialsFromString needs a reference to an std::string instance,
  // with a scope that outlives the test.
  std::string field_trials = webrtc::flags::FLAG_force_fieldtrials;
  webrtc::test::InitFieldTrialsFromString(field_trials);

  webrtc::test::RunTest(webrtc::Loopback);
  return 0;
}
@ -270,34 +270,34 @@ void Loopback() {

  VideoQualityTest::Params params;
  params.call = {flags::FLAG_send_side_bwe, call_bitrate_config, 0};
  params.video = {flags::FLAG_video,
                  flags::Width(),
                  flags::Height(),
                  flags::Fps(),
                  flags::MinBitrateKbps() * 1000,
                  flags::TargetBitrateKbps() * 1000,
                  flags::MaxBitrateKbps() * 1000,
                  flags::FLAG_suspend_below_min_bitrate,
                  flags::Codec(),
                  flags::NumTemporalLayers(),
                  flags::SelectedTL(),
                  0,  // No min transmit bitrate.
                  flags::FLAG_use_ulpfec,
                  flags::FLAG_use_flexfec,
                  flags::Clip(),
                  flags::GetCaptureDevice()};
  params.video[0] = {flags::FLAG_video,
                     flags::Width(),
                     flags::Height(),
                     flags::Fps(),
                     flags::MinBitrateKbps() * 1000,
                     flags::TargetBitrateKbps() * 1000,
                     flags::MaxBitrateKbps() * 1000,
                     flags::FLAG_suspend_below_min_bitrate,
                     flags::Codec(),
                     flags::NumTemporalLayers(),
                     flags::SelectedTL(),
                     0,  // No min transmit bitrate.
                     flags::FLAG_use_ulpfec,
                     flags::FLAG_use_flexfec,
                     flags::Clip(),
                     flags::GetCaptureDevice()};
  params.audio = {flags::FLAG_audio, flags::FLAG_audio_video_sync,
                  flags::FLAG_audio_dtx};
  params.logging = {flags::FLAG_logs, flags::FLAG_rtc_event_log_name,
                    flags::FLAG_rtp_dump_name, flags::FLAG_encoded_frame_path};
  params.screenshare.enabled = false;
  params.screenshare[0].enabled = false;
  params.analyzer = {"video", 0.0, 0.0, flags::DurationSecs(),
                     flags::OutputFilename(), flags::GraphTitle()};
  params.pipe = pipe_config;

  if (flags::NumStreams() > 1 && flags::Stream0().empty() &&
      flags::Stream1().empty()) {
    params.ss.infer_streams = true;
    params.ss[0].infer_streams = true;
  }

  std::vector<std::string> stream_descriptors;
@ -307,8 +307,9 @@ void Loopback() {
  SL_descriptors.push_back(flags::SL0());
  SL_descriptors.push_back(flags::SL1());
  VideoQualityTest::FillScalabilitySettings(
      &params, stream_descriptors, flags::NumStreams(), flags::SelectedStream(),
      flags::NumSpatialLayers(), flags::SelectedSL(), SL_descriptors);
      &params, 0, stream_descriptors, flags::NumStreams(),
      flags::SelectedStream(), flags::NumSpatialLayers(), flags::SelectedSL(),
      SL_descriptors);

  VideoQualityTest test;
  if (flags::DurationSecs()) {
File diff suppressed because it is too large
@ -35,6 +35,8 @@ class VideoQualityTest : public test::CallTest {
      bool send_side_bwe;
      Call::Config::BitrateConfig call_bitrate_config;
      int num_thumbnails;
      // Indicates if secondary_(video|ss|screenshare) structures are used.
      bool dual_video;
    } call;
    struct Video {
      bool enabled;
@ -53,7 +55,7 @@ class VideoQualityTest : public test::CallTest {
      bool flexfec;
      std::string clip_name;  // "Generator" to generate frames instead.
      size_t capture_device_index;
    } video;
    } video[2];
    struct Audio {
      bool enabled;
      bool sync_video;
@ -65,7 +67,7 @@ class VideoQualityTest : public test::CallTest {
      int32_t slide_change_interval;
      int32_t scroll_duration;
      std::vector<std::string> slides;
    } screenshare;
    } screenshare[2];
    struct Analyzer {
      std::string test_label;
      double avg_psnr_threshold;  // (*)
@ -84,7 +86,7 @@ class VideoQualityTest : public test::CallTest {
      std::vector<SpatialLayer> spatial_layers;
      // If set, default parameters will be used instead of |streams|.
      bool infer_streams;
    } ss;
    } ss[2];
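This header change is the heart of the dual-stream support: Params now keeps two slots each for video, screenshare and scalability settings, plus a call.dual_video switch, and the per-stream helpers take an explicit index. A minimal usage fragment, assuming the shapes shown in this hunk and reusing values taken verbatim from the tests above:

  // Sketch only; mirrors the DualStreamsTest setup, not a complete test.
  VideoQualityTest::Params params;
  params.call.dual_video = true;  // enable the secondary video/ss/screenshare slot
  params.video[0] = {true,   1280, 720, 30, 150000,
                     500000, 700000, false, "VP8", 3,
                     2,      400000, false, false, "ConferenceMotion_1280_720_50"};
  params.video[1] = {true,    1850, 1110, 5, 800000,
                     2500000, 2500000, false, "VP8", 3,
                     2,       400000, false, false, ""};
  params.screenshare[0].enabled = false;
  params.screenshare[1] = {true, false, 10};  // slot 1 carries the screenshare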
    struct Logging {
      bool logs;
      std::string rtc_event_log_name;
@ -99,6 +101,7 @@ class VideoQualityTest : public test::CallTest {

  static void FillScalabilitySettings(
      Params* params,
      size_t video_idx,
      const std::vector<std::string>& stream_descriptors,
      int num_streams,
      size_t selected_stream,
@ -118,17 +121,19 @@ class VideoQualityTest : public test::CallTest {
  void CheckParams();

  // Helper static methods.
  static VideoStream DefaultVideoStream(const Params& params);
  static VideoStream DefaultVideoStream(const Params& params, size_t video_idx);
  static VideoStream DefaultThumbnailStream();
  static std::vector<int> ParseCSV(const std::string& str);

  // Helper methods for setting up the call.
  void CreateCapturer();
  void CreateVideoStreams();
  void DestroyStreams();
  void CreateCapturers();
  std::unique_ptr<test::FrameGenerator> CreateFrameGenerator(size_t video_idx);
  void SetupThumbnailCapturers(size_t num_thumbnail_streams);
  void SetupVideo(Transport* send_transport, Transport* recv_transport);
  void SetupThumbnails(Transport* send_transport, Transport* recv_transport);
  void DestroyThumbnailStreams();
  void SetupScreenshareOrSVC();
  void SetupAudio(int send_channel_id,
                  int receive_channel_id,
                  Transport* transport,
@ -140,11 +145,9 @@ class VideoQualityTest : public test::CallTest {
  virtual std::unique_ptr<test::LayerFilteringTransport> CreateSendTransport();
  virtual std::unique_ptr<test::DirectTransport> CreateReceiveTransport();

  // We need a more general capturer than the FrameGeneratorCapturer.
  std::unique_ptr<test::VideoCapturer> video_capturer_;
  std::vector<std::unique_ptr<test::VideoCapturer>> video_capturers_;
  std::vector<std::unique_ptr<test::VideoCapturer>> thumbnail_capturers_;
  std::unique_ptr<test::FrameGenerator> frame_generator_;
  std::unique_ptr<VideoEncoder> video_encoder_;
  std::vector<std::unique_ptr<VideoEncoder>> video_encoders_;

  std::vector<std::unique_ptr<VideoEncoder>> thumbnail_encoders_;
  std::vector<VideoSendStream::Config> thumbnail_send_configs_;
@ -153,6 +156,10 @@ class VideoQualityTest : public test::CallTest {
  std::vector<VideoReceiveStream::Config> thumbnail_receive_configs_;
  std::vector<VideoReceiveStream*> thumbnail_receive_streams_;

  std::vector<VideoSendStream::Config> video_send_configs_;
  std::vector<VideoEncoderConfig> video_encoder_configs_;
  std::vector<VideoSendStream*> video_send_streams_;

  Clock* const clock_;

  int receive_logs_;
@ -164,6 +171,8 @@ class VideoQualityTest : public test::CallTest {

  std::unique_ptr<webrtc::RtcEventLog> recv_event_log_;
  std::unique_ptr<webrtc::RtcEventLog> send_event_log_;

  size_t num_video_streams_;
};

}  // namespace webrtc