Adds support for VP8 simulcast to scenario tests.
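
This is done by letting VideoStreamConfig::Encoder::layers.spatial control the
number of simulcast streams for VP8, creating one VideoReceiveStream per
simulcast layer in ReceiveVideoStream (each with its own local SSRC handed out
by CallClient), and disabling automatic resize and denoising in the VP8
settings when more than one stream or temporal layer is configured. A
condensed sketch of the usage exercised by the new unit test (identifiers are
from the test/scenario framework):

  Scenario s;
  auto route = s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
                              {s.CreateSimulationNode(NetworkNodeConfig())},
                              s.CreateClient("callee", CallClientConfig()),
                              {s.CreateSimulationNode(NetworkNodeConfig())});
  s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
    c->encoder.implementation =
        VideoStreamConfig::Encoder::Implementation::kSoftware;
    c->encoder.codec = VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
    // Requesting more than one spatial layer enables simulcast for VP8.
    c->encoder.layers.spatial = 3;
    // The source must be large enough to derive the lower simulcast layers.
    c->source.generator.width = 1024;
    c->source.generator.height = 768;
  });
  s.RunFor(TimeDelta::ms(500));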

Bug: webrtc:9510
Change-Id: Ice98e7bd98a1a8e4fd3b1a1c7c053a65de3f56e3
Reviewed-on: https://webrtc-review.googlesource.com/c/123380
Commit-Queue: Sebastian Jansson <srte@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26764}
Author: Sebastian Jansson
Date: 2019-02-20 11:16:19 +01:00
Committed by: Commit Bot
Commit: 5fbebd585e
Parent: ccb9b759c5
7 changed files with 181 additions and 65 deletions

View File

@@ -128,6 +128,7 @@ if (rtc_include_tests) {
testonly = true
sources = [
"scenario_unittest.cc",
"video_stream_unittest.cc",
]
if (!build_with_chromium && is_clang) {
suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]

View File

@@ -168,7 +168,7 @@ ReceiveAudioStream::ReceiveAudioStream(
Transport* feedback_transport)
: receiver_(receiver), config_(config) {
AudioReceiveStream::Config recv_config;
recv_config.rtp.local_ssrc = CallTest::kReceiverLocalAudioSsrc;
recv_config.rtp.local_ssrc = receiver_->GetNextAudioLocalSsrc();
recv_config.rtcp_send_transport = feedback_transport;
recv_config.rtp.remote_ssrc = send_stream->ssrc_;
receiver->ssrc_media_types_[recv_config.rtp.remote_ssrc] = MediaType::AUDIO;

View File

@@ -14,11 +14,20 @@
#include "absl/memory/memory.h"
#include "modules/audio_mixer/audio_mixer_impl.h"
#include "modules/congestion_controller/goog_cc/test/goog_cc_printer.h"
#include "test/call_test.h"
namespace webrtc {
namespace test {
namespace {
static constexpr size_t kNumSsrcs = 6;
const uint32_t kSendRtxSsrcs[kNumSsrcs] = {0xBADCAFD, 0xBADCAFE, 0xBADCAFF,
0xBADCB00, 0xBADCB01, 0xBADCB02};
const uint32_t kVideoSendSsrcs[kNumSsrcs] = {0xC0FFED, 0xC0FFEE, 0xC0FFEF,
0xC0FFF0, 0xC0FFF1, 0xC0FFF2};
const uint32_t kVideoRecvLocalSsrcs[kNumSsrcs] = {0xDAB001, 0xDAB002, 0xDAB003,
0xDAB004, 0xDAB005, 0xDAB006};
const uint32_t kAudioSendSsrc = 0xDEADBEEF;
const uint32_t kReceiverLocalAudioSsrc = 0x1234567;
const char* kPriorityStreamId = "priority-track";
CallClientFakeAudio InitAudio() {
@@ -192,19 +201,30 @@ std::unique_ptr<RtcEventLogOutput> CallClient::GetLogWriter(std::string name) {
}
uint32_t CallClient::GetNextVideoSsrc() {
RTC_CHECK_LT(next_video_ssrc_index_, CallTest::kNumSsrcs);
return CallTest::kVideoSendSsrcs[next_video_ssrc_index_++];
RTC_CHECK_LT(next_video_ssrc_index_, kNumSsrcs);
return kVideoSendSsrcs[next_video_ssrc_index_++];
}
uint32_t CallClient::GetNextVideoLocalSsrc() {
RTC_CHECK_LT(next_video_local_ssrc_index_, kNumSsrcs);
return kVideoRecvLocalSsrcs[next_video_local_ssrc_index_++];
}
uint32_t CallClient::GetNextAudioSsrc() {
RTC_CHECK_LT(next_audio_ssrc_index_, 1);
next_audio_ssrc_index_++;
return CallTest::kAudioSendSsrc;
return kAudioSendSsrc;
}
uint32_t CallClient::GetNextAudioLocalSsrc() {
RTC_CHECK_LT(next_audio_local_ssrc_index_, 1);
next_audio_local_ssrc_index_++;
return kReceiverLocalAudioSsrc;
}
uint32_t CallClient::GetNextRtxSsrc() {
RTC_CHECK_LT(next_rtx_ssrc_index_, CallTest::kNumSsrcs);
return CallTest::kSendRtxSsrcs[next_rtx_ssrc_index_++];
RTC_CHECK_LT(next_rtx_ssrc_index_, kNumSsrcs);
return kSendRtxSsrcs[next_rtx_ssrc_index_++];
}
std::string CallClient::GetNextPriorityId() {

View File

@@ -88,7 +88,9 @@ class CallClient : public EmulatedNetworkReceiverInterface {
friend class AudioStreamPair;
friend class NetworkNodeTransport;
uint32_t GetNextVideoSsrc();
uint32_t GetNextVideoLocalSsrc();
uint32_t GetNextAudioSsrc();
uint32_t GetNextAudioLocalSsrc();
uint32_t GetNextRtxSsrc();
std::string GetNextPriorityId();
void AddExtensions(std::vector<RtpExtension> extensions);
@@ -106,8 +108,10 @@ class CallClient : public EmulatedNetworkReceiverInterface {
// to subtract the overhead before processing.
std::map<uint64_t, DataSize> route_overhead_;
int next_video_ssrc_index_ = 0;
int next_video_local_ssrc_index_ = 0;
int next_rtx_ssrc_index_ = 0;
int next_audio_ssrc_index_ = 0;
int next_audio_local_ssrc_index_ = 0;
int next_priority_index_ = 0;
std::map<uint32_t, MediaType> ssrc_media_types_;
};

View File

@@ -161,13 +161,17 @@ CreateVp9SpecificSettings(VideoStreamConfig video_config) {
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
CreateVp8SpecificSettings(VideoStreamConfig config) {
RTC_DCHECK_EQ(config.encoder.layers.temporal, 1);
RTC_DCHECK_EQ(config.encoder.layers.spatial, 1);
VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
vp8_settings.frameDroppingOn = config.encoder.frame_dropping;
vp8_settings.keyFrameInterval = config.encoder.key_frame_interval.value_or(0);
vp8_settings.automaticResizeOn = config.encoder.single.automatic_scaling;
vp8_settings.denoisingOn = config.encoder.single.denoising;
vp8_settings.numberOfTemporalLayers = config.encoder.layers.temporal;
if (config.encoder.layers.spatial * config.encoder.layers.temporal > 1) {
vp8_settings.automaticResizeOn = false;
vp8_settings.denoisingOn = false;
} else {
vp8_settings.automaticResizeOn = config.encoder.single.automatic_scaling;
vp8_settings.denoisingOn = config.encoder.single.denoising;
}
return new rtc::RefCountedObject<
VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
}
@@ -210,17 +214,20 @@ VideoEncoderConfig CreateVideoEncoderConfig(VideoStreamConfig config) {
encoder_config.video_format =
SdpVideoFormat(CodecTypeToPayloadString(config.encoder.codec), {});
// TODO(srte): Replace with actual value when supported.
size_t num_streams = 1;
if (config.encoder.codec == VideoStreamConfig::Encoder::Codec::kVideoCodecVP8)
num_streams = static_cast<size_t>(config.encoder.layers.spatial);
encoder_config.number_of_streams = num_streams;
encoder_config.simulcast_layers = std::vector<VideoStream>(num_streams);
encoder_config.min_transmit_bitrate_bps = config.stream.pad_to_rate.bps();
std::string cricket_codec = CodecTypeToCodecName(config.encoder.codec);
if (!cricket_codec.empty()) {
bool screenshare = config.encoder.content_type ==
VideoStreamConfig::Encoder::ContentType::kScreen;
encoder_config.video_stream_factory =
new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
cricket_codec, kDefaultMaxQp, false, false);
cricket_codec, kDefaultMaxQp, screenshare, screenshare);
} else {
encoder_config.video_stream_factory =
new rtc::RefCountedObject<DefaultVideoStreamFactory>();
@@ -290,6 +297,39 @@ std::unique_ptr<FrameGenerator> CreateFrameGenerator(
}
}
VideoReceiveStream::Config CreateVideoReceiveStreamConfig(
VideoStreamConfig config,
Transport* feedback_transport,
VideoReceiveStream::Decoder decoder,
rtc::VideoSinkInterface<VideoFrame>* renderer,
uint32_t local_ssrc,
uint32_t ssrc,
uint32_t rtx_ssrc) {
VideoReceiveStream::Config recv(feedback_transport);
recv.rtp.remb = !config.stream.packet_feedback;
recv.rtp.transport_cc = config.stream.packet_feedback;
recv.rtp.local_ssrc = local_ssrc;
recv.rtp.extensions = GetVideoRtpExtensions(config);
RTC_DCHECK(!config.stream.use_rtx ||
config.stream.nack_history_time > TimeDelta::Zero());
recv.rtp.nack.rtp_history_ms = config.stream.nack_history_time.ms();
recv.rtp.protected_by_flexfec = config.stream.use_flexfec;
recv.rtp.remote_ssrc = ssrc;
recv.decoders.push_back(decoder);
recv.renderer = renderer;
if (config.stream.use_rtx) {
recv.rtp.rtx_ssrc = rtx_ssrc;
recv.rtp.rtx_associated_payload_types[CallTest::kSendRtxPayloadType] =
CodecTypeToPayloadType(config.encoder.codec);
}
if (config.stream.use_ulpfec) {
recv.rtp.red_payload_type = CallTest::kRedPayloadType;
recv.rtp.ulpfec_payload_type = CallTest::kUlpfecPayloadType;
recv.rtp.rtx_associated_payload_types[CallTest::kRtxRedPayloadType] =
CallTest::kRedPayloadType;
}
return recv;
}
} // namespace
SendVideoStream::SendVideoStream(CallClient* sender,
@@ -428,35 +468,7 @@ ReceiveVideoStream::ReceiveVideoStream(CallClient* receiver,
Transport* feedback_transport,
VideoQualityAnalyzer* analyzer)
: receiver_(receiver), config_(config) {
if (analyzer->Active()) {
renderer_ = absl::make_unique<DecodedFrameTap>(analyzer);
} else {
renderer_ = absl::make_unique<FakeVideoRenderer>();
}
VideoReceiveStream::Config recv_config(feedback_transport);
recv_config.rtp.remb = !config.stream.packet_feedback;
recv_config.rtp.transport_cc = config.stream.packet_feedback;
recv_config.rtp.local_ssrc = CallTest::kReceiverLocalVideoSsrc;
recv_config.rtp.extensions = GetVideoRtpExtensions(config);
receiver_->AddExtensions(recv_config.rtp.extensions);
RTC_DCHECK(!config.stream.use_rtx ||
config.stream.nack_history_time > TimeDelta::Zero());
recv_config.rtp.nack.rtp_history_ms = config.stream.nack_history_time.ms();
recv_config.rtp.protected_by_flexfec = config.stream.use_flexfec;
recv_config.renderer = renderer_.get();
if (config.stream.use_rtx) {
recv_config.rtp.rtx_ssrc = send_stream->rtx_ssrcs_[chosen_stream];
receiver->ssrc_media_types_[recv_config.rtp.rtx_ssrc] = MediaType::VIDEO;
recv_config.rtp
.rtx_associated_payload_types[CallTest::kSendRtxPayloadType] =
CodecTypeToPayloadType(config.encoder.codec);
}
recv_config.rtp.remote_ssrc = send_stream->ssrcs_[chosen_stream];
receiver->ssrc_media_types_[recv_config.rtp.remote_ssrc] = MediaType::VIDEO;
VideoReceiveStream::Decoder decoder =
CreateMatchingDecoder(CodecTypeToPayloadType(config.encoder.codec),
CodecTypeToPayloadString(config.encoder.codec));
if (config.encoder.codec ==
VideoStreamConfig::Encoder::Codec::kVideoCodecGeneric) {
decoder_factory_ = absl::make_unique<FunctionVideoDecoderFactory>(
@@ -464,42 +476,59 @@ ReceiveVideoStream::ReceiveVideoStream(CallClient* receiver,
} else {
decoder_factory_ = absl::make_unique<InternalDecoderFactory>();
}
decoder.decoder_factory = decoder_factory_.get();
recv_config.decoders.push_back(decoder);
if (config.stream.use_flexfec) {
FlexfecReceiveStream::Config flexfec_config(feedback_transport);
flexfec_config.payload_type = CallTest::kFlexfecPayloadType;
flexfec_config.remote_ssrc = CallTest::kFlexfecSendSsrc;
receiver->ssrc_media_types_[flexfec_config.remote_ssrc] = MediaType::VIDEO;
flexfec_config.protected_media_ssrcs = send_stream->rtx_ssrcs_;
flexfec_config.local_ssrc = recv_config.rtp.local_ssrc;
flecfec_stream_ =
receiver_->call_->CreateFlexfecReceiveStream(flexfec_config);
VideoReceiveStream::Decoder decoder =
CreateMatchingDecoder(CodecTypeToPayloadType(config.encoder.codec),
CodecTypeToPayloadString(config.encoder.codec));
decoder.decoder_factory = decoder_factory_.get();
size_t num_streams = 1;
if (config.encoder.codec == VideoStreamConfig::Encoder::Codec::kVideoCodecVP8)
num_streams = config.encoder.layers.spatial;
for (size_t i = 0; i < num_streams; ++i) {
rtc::VideoSinkInterface<VideoFrame>* renderer = &fake_renderer_;
if (analyzer->Active() && i == chosen_stream) {
analyzer_ = absl::make_unique<DecodedFrameTap>(analyzer);
renderer = analyzer_.get();
}
auto recv_config = CreateVideoReceiveStreamConfig(
config, feedback_transport, decoder, renderer,
receiver_->GetNextVideoLocalSsrc(), send_stream->ssrcs_[i],
send_stream->rtx_ssrcs_[i]);
if (config.stream.use_flexfec) {
RTC_DCHECK(num_streams == 1);
FlexfecReceiveStream::Config flexfec(feedback_transport);
flexfec.payload_type = CallTest::kFlexfecPayloadType;
flexfec.remote_ssrc = CallTest::kFlexfecSendSsrc;
flexfec.protected_media_ssrcs = send_stream->rtx_ssrcs_;
flexfec.local_ssrc = recv_config.rtp.local_ssrc;
receiver_->ssrc_media_types_[flexfec.remote_ssrc] = MediaType::VIDEO;
flecfec_stream_ = receiver_->call_->CreateFlexfecReceiveStream(flexfec);
}
receiver_->ssrc_media_types_[recv_config.rtp.remote_ssrc] =
MediaType::VIDEO;
if (config.stream.use_rtx)
receiver_->ssrc_media_types_[recv_config.rtp.rtx_ssrc] = MediaType::VIDEO;
receive_streams_.push_back(
receiver_->call_->CreateVideoReceiveStream(std::move(recv_config)));
}
if (config.stream.use_ulpfec) {
recv_config.rtp.red_payload_type = CallTest::kRedPayloadType;
recv_config.rtp.ulpfec_payload_type = CallTest::kUlpfecPayloadType;
recv_config.rtp.rtx_associated_payload_types[CallTest::kRtxRedPayloadType] =
CallTest::kRedPayloadType;
}
receive_stream_ =
receiver_->call_->CreateVideoReceiveStream(std::move(recv_config));
}
ReceiveVideoStream::~ReceiveVideoStream() {
receiver_->call_->DestroyVideoReceiveStream(receive_stream_);
for (auto* recv_stream : receive_streams_)
receiver_->call_->DestroyVideoReceiveStream(recv_stream);
if (flecfec_stream_)
receiver_->call_->DestroyFlexfecReceiveStream(flecfec_stream_);
}
void ReceiveVideoStream::Start() {
receive_stream_->Start();
for (auto* recv_stream : receive_streams_)
recv_stream->Start();
receiver_->call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
}
void ReceiveVideoStream::Stop() {
receive_stream_->Stop();
for (auto* recv_stream : receive_streams_)
recv_stream->Stop();
}
VideoStreamPair::~VideoStreamPair() = default;

View File

@@ -15,6 +15,7 @@
#include "rtc_base/constructor_magic.h"
#include "test/fake_encoder.h"
#include "test/fake_videorenderer.h"
#include "test/frame_generator_capturer.h"
#include "test/logging/log_writer.h"
#include "test/scenario/call_client.h"
@@ -82,9 +83,10 @@ class ReceiveVideoStream {
Transport* feedback_transport,
VideoQualityAnalyzer* analyzer);
VideoReceiveStream* receive_stream_ = nullptr;
std::vector<VideoReceiveStream*> receive_streams_;
FlexfecReceiveStream* flecfec_stream_ = nullptr;
std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> renderer_;
FakeVideoRenderer fake_renderer_;
std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> analyzer_;
CallClient* const receiver_;
const VideoStreamConfig config_;
std::unique_ptr<VideoDecoderFactory> decoder_factory_;

View File

@@ -0,0 +1,60 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <atomic>
#include "test/gtest.h"
#include "test/scenario/scenario.h"
namespace webrtc {
namespace test {
namespace {
using Codec = VideoStreamConfig::Encoder::Codec;
using CodecImpl = VideoStreamConfig::Encoder::Implementation;
} // namespace
TEST(VideoStreamTest, RecievesVp8SimulcastFrames) {
TimeDelta kRunTime = TimeDelta::ms(500);
int kFrameRate = 30;
std::atomic<int> frame_count(0);
{
Scenario s;
auto route = s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
{s.CreateSimulationNode(NetworkNodeConfig())},
s.CreateClient("callee", CallClientConfig()),
{s.CreateSimulationNode(NetworkNodeConfig())});
s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
// TODO(srte): Replace with code checking for all simulcast streams when
// there's a hook available for that.
c->analyzer.frame_quality_handler = [&](const VideoFrameQualityInfo&) {
frame_count++;
};
c->source.framerate = kFrameRate;
// The resolution must be high enough to allow smaller layers to be
// created.
c->source.generator.width = 1024;
c->source.generator.height = 768;
c->encoder.implementation = CodecImpl::kSoftware;
c->encoder.codec = Codec::kVideoCodecVP8;
// By enabling multiple spatial layers, simulcast will be enabled for VP8.
c->encoder.layers.spatial = 3;
});
s.RunFor(kRunTime);
}
// Using 20% error margin to avoid flakiness.
const int kExpectedCount =
static_cast<int>(kRunTime.seconds<double>() * kFrameRate * 0.8);
EXPECT_GE(frame_count, kExpectedCount);
}
} // namespace test
} // namespace webrtc