Moved PayloadRouter to call/.

This is done in preparation for moving ownership of PayloadRouter to RtpTransportControllerSend.

Bug: webrtc:9517
Change-Id: I4a5b449cbcfc23db594dc5bb68ca322dd8fa33b7
Reviewed-on: https://webrtc-review.googlesource.com/88241
Commit-Queue: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Sebastian Jansson <srte@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23936}
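
As a rough illustration of the stated direction, the sketch below shows one way RtpTransportControllerSend could own and create PayloadRouter instances once ownership moves in a follow-up change. The class name suffix, factory method, and member layout are hypothetical and not part of this CL; only the PayloadRouter constructor signature comes from the moved header.

// Hypothetical sketch only; this CL moves the file and does not add this API.
#include <map>
#include <memory>
#include <vector>

#include "call/payload_router.h"

namespace webrtc {

// RtpTransportControllerSendSketch and CreatePayloadRouter are made-up names
// used to illustrate the intended ownership; the real controller lives in
// call/rtp_transport_controller_send.h.
class RtpTransportControllerSendSketch {
 public:
  PayloadRouter* CreatePayloadRouter(
      const std::vector<RtpRtcp*>& rtp_modules,
      const std::vector<uint32_t>& ssrcs,
      int payload_type,
      const std::map<uint32_t, RtpPayloadState>& states) {
    payload_routers_.push_back(std::make_unique<PayloadRouter>(
        rtp_modules, ssrcs, payload_type, states));
    return payload_routers_.back().get();
  }

 private:
  // Owning the routers here, rather than in the video send stream, is the
  // end state the commit message describes.
  std::vector<std::unique_ptr<PayloadRouter>> payload_routers_;
};

}  // namespace webrtc
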
Author: Stefan Holmer
Date: 2018-07-11 17:11:31 +02:00
Committed by: Commit Bot
Parent: 28bb391918
Commit: a2f1533e27

8 changed files with 20 additions and 12 deletions

call/BUILD.gn

@@ -98,6 +98,8 @@ rtc_source_set("rtp_receiver") {
rtc_source_set("rtp_sender") {
sources = [
"payload_router.cc",
"payload_router.h",
"rtp_transport_controller_send.cc",
"rtp_transport_controller_send.h",
]
@@ -106,10 +108,16 @@ rtc_source_set("rtp_sender") {
":rtp_interfaces",
"..:webrtc_common",
"../api/transport:network_control",
"../api/video_codecs:video_codecs_api",
"../modules/congestion_controller",
"../modules/congestion_controller/rtp:congestion_controller",
"../modules/pacing",
"../modules/rtp_rtcp:rtp_rtcp",
"../modules/rtp_rtcp:rtp_rtcp_format",
"../modules/rtp_rtcp:rtp_video_header",
"../modules/utility",
"../modules/video_coding:video_codec_interface",
"../rtc_base:checks",
"../rtc_base:rtc_base",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_task_queue",
@@ -268,6 +276,7 @@ if (rtc_include_tests) {
"bitrate_estimator_tests.cc",
"call_unittest.cc",
"flexfec_receive_stream_unittest.cc",
"payload_router_unittest.cc",
"receive_time_calculator_unittest.cc",
"rtcp_demuxer_unittest.cc",
"rtp_bitrate_configurator_unittest.cc",
@@ -304,11 +313,13 @@ if (rtc_include_tests) {
"../modules/rtp_rtcp:mock_rtp_rtcp",
"../modules/rtp_rtcp:rtp_rtcp_format",
"../modules/utility:mock_process_thread",
"../modules/video_coding:video_codec_interface",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../system_wrappers",
"../test:audio_codec_mocks",
"../test:direct_transport",
"../test:field_trial",
"../test:test_common",
"../test:test_support",
"../test:video_test_common",

call/payload_router.cc (new file, 296 lines)

@@ -0,0 +1,296 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "call/payload_router.h"
#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/random.h"
#include "rtc_base/timeutils.h"
namespace webrtc {
namespace {
// Map information from info into rtp.
void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader* rtp) {
RTC_DCHECK(info);
rtp->codec = info->codecType;
switch (info->codecType) {
case kVideoCodecVP8: {
rtp->vp8().InitRTPVideoHeaderVP8();
rtp->vp8().nonReference = info->codecSpecific.VP8.nonReference;
rtp->vp8().temporalIdx = info->codecSpecific.VP8.temporalIdx;
rtp->vp8().layerSync = info->codecSpecific.VP8.layerSync;
rtp->vp8().keyIdx = info->codecSpecific.VP8.keyIdx;
rtp->simulcastIdx = info->codecSpecific.VP8.simulcastIdx;
return;
}
case kVideoCodecVP9: {
rtp->vp9().InitRTPVideoHeaderVP9();
rtp->vp9().inter_pic_predicted =
info->codecSpecific.VP9.inter_pic_predicted;
rtp->vp9().flexible_mode = info->codecSpecific.VP9.flexible_mode;
rtp->vp9().ss_data_available = info->codecSpecific.VP9.ss_data_available;
rtp->vp9().non_ref_for_inter_layer_pred =
info->codecSpecific.VP9.non_ref_for_inter_layer_pred;
rtp->vp9().temporal_idx = info->codecSpecific.VP9.temporal_idx;
rtp->vp9().spatial_idx = info->codecSpecific.VP9.spatial_idx;
rtp->vp9().temporal_up_switch =
info->codecSpecific.VP9.temporal_up_switch;
rtp->vp9().inter_layer_predicted =
info->codecSpecific.VP9.inter_layer_predicted;
rtp->vp9().gof_idx = info->codecSpecific.VP9.gof_idx;
rtp->vp9().num_spatial_layers =
info->codecSpecific.VP9.num_spatial_layers;
if (info->codecSpecific.VP9.ss_data_available) {
rtp->vp9().spatial_layer_resolution_present =
info->codecSpecific.VP9.spatial_layer_resolution_present;
if (info->codecSpecific.VP9.spatial_layer_resolution_present) {
for (size_t i = 0; i < info->codecSpecific.VP9.num_spatial_layers;
++i) {
rtp->vp9().width[i] = info->codecSpecific.VP9.width[i];
rtp->vp9().height[i] = info->codecSpecific.VP9.height[i];
}
}
rtp->vp9().gof.CopyGofInfoVP9(info->codecSpecific.VP9.gof);
}
rtp->vp9().num_ref_pics = info->codecSpecific.VP9.num_ref_pics;
for (int i = 0; i < info->codecSpecific.VP9.num_ref_pics; ++i) {
rtp->vp9().pid_diff[i] = info->codecSpecific.VP9.p_diff[i];
}
rtp->vp9().end_of_picture = info->codecSpecific.VP9.end_of_picture;
return;
}
case kVideoCodecH264:
rtp->h264().packetization_mode =
info->codecSpecific.H264.packetization_mode;
rtp->simulcastIdx = info->codecSpecific.H264.simulcast_idx;
return;
case kVideoCodecMultiplex:
case kVideoCodecGeneric:
rtp->codec = kVideoCodecGeneric;
rtp->simulcastIdx = info->codecSpecific.generic.simulcast_idx;
return;
default:
return;
}
}
void SetVideoTiming(VideoSendTiming* timing, const EncodedImage& image) {
if (image.timing_.flags == VideoSendTiming::TimingFrameFlags::kInvalid ||
image.timing_.flags == VideoSendTiming::TimingFrameFlags::kNotTriggered) {
timing->flags = VideoSendTiming::TimingFrameFlags::kInvalid;
return;
}
timing->encode_start_delta_ms = VideoSendTiming::GetDeltaCappedMs(
image.capture_time_ms_, image.timing_.encode_start_ms);
timing->encode_finish_delta_ms = VideoSendTiming::GetDeltaCappedMs(
image.capture_time_ms_, image.timing_.encode_finish_ms);
timing->packetization_finish_delta_ms = 0;
timing->pacer_exit_delta_ms = 0;
timing->network_timestamp_delta_ms = 0;
timing->network2_timestamp_delta_ms = 0;
timing->flags = image.timing_.flags;
}
} // namespace
// State for setting picture id and tl0 pic idx, for VP8 and VP9
// TODO(nisse): Make these properties not codec specific.
class PayloadRouter::RtpPayloadParams final {
public:
RtpPayloadParams(const uint32_t ssrc, const RtpPayloadState* state)
: ssrc_(ssrc) {
Random random(rtc::TimeMicros());
state_.picture_id =
state ? state->picture_id : (random.Rand<int16_t>() & 0x7FFF);
state_.tl0_pic_idx = state ? state->tl0_pic_idx : (random.Rand<uint8_t>());
}
~RtpPayloadParams() {}
void Set(RTPVideoHeader* rtp_video_header, bool first_frame_in_picture) {
// Always set picture id. Set tl0_pic_idx iff temporal index is set.
if (first_frame_in_picture) {
state_.picture_id =
(static_cast<uint16_t>(state_.picture_id) + 1) & 0x7FFF;
}
if (rtp_video_header->codec == kVideoCodecVP8) {
rtp_video_header->vp8().pictureId = state_.picture_id;
if (rtp_video_header->vp8().temporalIdx != kNoTemporalIdx) {
if (rtp_video_header->vp8().temporalIdx == 0) {
++state_.tl0_pic_idx;
}
rtp_video_header->vp8().tl0PicIdx = state_.tl0_pic_idx;
}
}
if (rtp_video_header->codec == kVideoCodecVP9) {
rtp_video_header->vp9().picture_id = state_.picture_id;
// Note that in the case that we have no temporal layers but we do have
// spatial layers, packets will carry layering info with a temporal_idx of
// zero, and we then have to set and increment tl0_pic_idx.
if (rtp_video_header->vp9().temporal_idx != kNoTemporalIdx ||
rtp_video_header->vp9().spatial_idx != kNoSpatialIdx) {
if (first_frame_in_picture &&
(rtp_video_header->vp9().temporal_idx == 0 ||
rtp_video_header->vp9().temporal_idx == kNoTemporalIdx)) {
++state_.tl0_pic_idx;
}
rtp_video_header->vp9().tl0_pic_idx = state_.tl0_pic_idx;
}
}
}
uint32_t ssrc() const { return ssrc_; }
RtpPayloadState state() const { return state_; }
private:
const uint32_t ssrc_;
RtpPayloadState state_;
};
PayloadRouter::PayloadRouter(const std::vector<RtpRtcp*>& rtp_modules,
const std::vector<uint32_t>& ssrcs,
int payload_type,
const std::map<uint32_t, RtpPayloadState>& states)
: active_(false), rtp_modules_(rtp_modules), payload_type_(payload_type) {
RTC_DCHECK_EQ(ssrcs.size(), rtp_modules.size());
// SSRCs are assumed to be sorted in the same order as |rtp_modules|.
for (uint32_t ssrc : ssrcs) {
// Restore state if it previously existed.
const RtpPayloadState* state = nullptr;
auto it = states.find(ssrc);
if (it != states.end()) {
state = &it->second;
}
params_.push_back(RtpPayloadParams(ssrc, state));
}
}
PayloadRouter::~PayloadRouter() {}
void PayloadRouter::SetActive(bool active) {
rtc::CritScope lock(&crit_);
if (active_ == active)
return;
const std::vector<bool> active_modules(rtp_modules_.size(), active);
SetActiveModules(active_modules);
}
void PayloadRouter::SetActiveModules(const std::vector<bool> active_modules) {
rtc::CritScope lock(&crit_);
RTC_DCHECK_EQ(rtp_modules_.size(), active_modules.size());
active_ = false;
for (size_t i = 0; i < active_modules.size(); ++i) {
if (active_modules[i]) {
active_ = true;
}
// Sends a kRtcpByeCode when going from true to false.
rtp_modules_[i]->SetSendingStatus(active_modules[i]);
// If set to false this module won't send media.
rtp_modules_[i]->SetSendingMediaStatus(active_modules[i]);
}
}
bool PayloadRouter::IsActive() {
rtc::CritScope lock(&crit_);
return active_ && !rtp_modules_.empty();
}
std::map<uint32_t, RtpPayloadState> PayloadRouter::GetRtpPayloadStates() const {
rtc::CritScope lock(&crit_);
std::map<uint32_t, RtpPayloadState> payload_states;
for (const auto& param : params_) {
payload_states[param.ssrc()] = param.state();
}
return payload_states;
}
EncodedImageCallback::Result PayloadRouter::OnEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
rtc::CritScope lock(&crit_);
RTC_DCHECK(!rtp_modules_.empty());
if (!active_)
return Result(Result::ERROR_SEND_FAILED);
RTPVideoHeader rtp_video_header;
if (codec_specific_info)
CopyCodecSpecific(codec_specific_info, &rtp_video_header);
rtp_video_header.rotation = encoded_image.rotation_;
rtp_video_header.content_type = encoded_image.content_type_;
rtp_video_header.playout_delay = encoded_image.playout_delay_;
SetVideoTiming(&rtp_video_header.video_timing, encoded_image);
int stream_index = rtp_video_header.simulcastIdx;
RTC_DCHECK_LT(stream_index, rtp_modules_.size());
// Sets picture id and tl0 pic idx.
const bool first_frame_in_picture =
(codec_specific_info && codec_specific_info->codecType == kVideoCodecVP9)
? codec_specific_info->codecSpecific.VP9.first_frame_in_picture
: true;
params_[stream_index].Set(&rtp_video_header, first_frame_in_picture);
uint32_t frame_id;
if (!rtp_modules_[stream_index]->Sending()) {
// The payload router could be active but this module isn't sending.
return Result(Result::ERROR_SEND_FAILED);
}
bool send_result = rtp_modules_[stream_index]->SendOutgoingData(
encoded_image._frameType, payload_type_, encoded_image._timeStamp,
encoded_image.capture_time_ms_, encoded_image._buffer,
encoded_image._length, fragmentation, &rtp_video_header, &frame_id);
if (!send_result)
return Result(Result::ERROR_SEND_FAILED);
return Result(Result::OK, frame_id);
}
void PayloadRouter::OnBitrateAllocationUpdated(
const VideoBitrateAllocation& bitrate) {
rtc::CritScope lock(&crit_);
if (IsActive()) {
if (rtp_modules_.size() == 1) {
// If spatial scalability is enabled, it is covered by a single stream.
rtp_modules_[0]->SetVideoBitrateAllocation(bitrate);
} else {
// Simulcast is in use, split the VideoBitrateAllocation into one struct
// per rtp stream, moving over the temporal layer allocation.
for (size_t si = 0; si < rtp_modules_.size(); ++si) {
// Don't send empty TargetBitrate messages on streams not being relayed.
if (!bitrate.IsSpatialLayerUsed(si)) {
// The next spatial layer could be used if the current one is
// inactive.
continue;
}
VideoBitrateAllocation layer_bitrate;
for (int tl = 0; tl < kMaxTemporalStreams; ++tl) {
if (bitrate.HasBitrate(si, tl))
layer_bitrate.SetBitrate(0, tl, bitrate.GetBitrate(si, tl));
}
rtp_modules_[si]->SetVideoBitrateAllocation(layer_bitrate);
}
}
}
}
} // namespace webrtc
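
To make the simulcast split in OnBitrateAllocationUpdated() concrete, the standalone sketch below performs the same per-stream fan-out as a free function. SplitPerSimulcastStream is a hypothetical helper and the include paths are assumptions; the loop body mirrors the code above, except that inactive streams are left as empty allocations instead of being skipped entirely.

// Illustration only: SplitPerSimulcastStream is a hypothetical helper that
// mirrors the simulcast branch of PayloadRouter::OnBitrateAllocationUpdated().
#include <vector>

#include "api/video/video_bitrate_allocation.h"  // Include path assumed.
#include "common_types.h"  // NOLINT(build/include), for kMaxTemporalStreams.

namespace webrtc {

std::vector<VideoBitrateAllocation> SplitPerSimulcastStream(
    const VideoBitrateAllocation& total, size_t num_streams) {
  std::vector<VideoBitrateAllocation> per_stream(num_streams);
  for (size_t si = 0; si < num_streams; ++si) {
    if (!total.IsSpatialLayerUsed(si))
      continue;  // Left empty; the router sends no TargetBitrate for it.
    for (int tl = 0; tl < kMaxTemporalStreams; ++tl) {
      // Each RTP stream reports its own allocation under spatial index 0.
      if (total.HasBitrate(si, tl))
        per_stream[si].SetBitrate(0, tl, total.GetBitrate(si, tl));
    }
  }
  return per_stream;
}

}  // namespace webrtc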

call/payload_router.h (new file, 84 lines)

@@ -0,0 +1,84 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef CALL_PAYLOAD_ROUTER_H_
#define CALL_PAYLOAD_ROUTER_H_
#include <map>
#include <vector>
#include "api/video_codecs/video_encoder.h"
#include "common_types.h" // NOLINT(build/include)
#include "modules/rtp_rtcp/source/rtp_video_header.h"
#include "rtc_base/constructormagic.h"
#include "rtc_base/criticalsection.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
class RTPFragmentationHeader;
class RtpRtcp;
// Currently only VP8/VP9 specific.
struct RtpPayloadState {
int16_t picture_id = -1;
uint8_t tl0_pic_idx = 0;
};
// PayloadRouter routes outgoing data to the correct sending RTP module, based
// on the simulcast layer in RTPVideoHeader.
class PayloadRouter : public EncodedImageCallback {
public:
// Rtp modules are assumed to be sorted in simulcast index order.
PayloadRouter(const std::vector<RtpRtcp*>& rtp_modules,
const std::vector<uint32_t>& ssrcs,
int payload_type,
const std::map<uint32_t, RtpPayloadState>& states);
~PayloadRouter() override;
// PayloadRouter only routes packets while it is active; all packets are
// dropped otherwise.
void SetActive(bool active);
// Sets the sending status of the rtp modules and appropriately sets the
// payload router to active if any rtp modules are active.
void SetActiveModules(const std::vector<bool> active_modules);
bool IsActive();
std::map<uint32_t, RtpPayloadState> GetRtpPayloadStates() const;
// Implements EncodedImageCallback.
// Returns Result::OK if the frame was forwarded to a sending RTP module,
// and an error result otherwise.
EncodedImageCallback::Result OnEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) override;
void OnBitrateAllocationUpdated(const VideoBitrateAllocation& bitrate);
private:
class RtpPayloadParams;
void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
rtc::CriticalSection crit_;
bool active_ RTC_GUARDED_BY(crit_);
// Rtp modules are assumed to be sorted in simulcast index order. Not owned.
const std::vector<RtpRtcp*> rtp_modules_;
const int payload_type_;
std::vector<RtpPayloadParams> params_ RTC_GUARDED_BY(crit_);
RTC_DISALLOW_COPY_AND_ASSIGN(PayloadRouter);
};
} // namespace webrtc
#endif // CALL_PAYLOAD_ROUTER_H_
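
Because PayloadRouter implements EncodedImageCallback, a sender hands it encoded frames through the usual callback registration. The sketch below illustrates that wiring with a hypothetical free function; in the real pipeline frames reach the router via the video send stream's encoder sink rather than a direct registration on the encoder.

// Wiring sketch: PayloadRouter is an EncodedImageCallback, so it can be
// registered as an encoder's encode-complete sink. RegisterRouterAsEncoderSink
// is illustrative only; production code forwards frames via the send stream.
#include "api/video_codecs/video_encoder.h"
#include "call/payload_router.h"

namespace webrtc {

void RegisterRouterAsEncoderSink(VideoEncoder* encoder, PayloadRouter* router) {
  // The router must outlive any frames the encoder may still deliver.
  encoder->RegisterEncodeCompleteCallback(router);
}

}  // namespace webrtc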

call/payload_router_unittest.cc (new file, 719 lines)

@@ -0,0 +1,719 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include <string>
#include "call/payload_router.h"
#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"
using ::testing::_;
using ::testing::AnyNumber;
using ::testing::Invoke;
using ::testing::NiceMock;
using ::testing::Return;
using ::testing::Unused;
namespace webrtc {
namespace {
const int8_t kPayloadType = 96;
const uint32_t kSsrc1 = 12345;
const uint32_t kSsrc2 = 23456;
const uint32_t kSsrc3 = 34567;
const int16_t kPictureId = 123;
const int16_t kTl0PicIdx = 20;
const uint8_t kTemporalIdx = 1;
const int16_t kInitialPictureId1 = 222;
const int16_t kInitialPictureId2 = 44;
const int16_t kInitialTl0PicIdx1 = 99;
const int16_t kInitialTl0PicIdx2 = 199;
} // namespace
TEST(PayloadRouterTest, SendOnOneModule) {
NiceMock<MockRtpRtcp> rtp;
std::vector<RtpRtcp*> modules(1, &rtp);
uint8_t payload = 'a';
EncodedImage encoded_image;
encoded_image._timeStamp = 1;
encoded_image.capture_time_ms_ = 2;
encoded_image._frameType = kVideoFrameKey;
encoded_image._buffer = &payload;
encoded_image._length = 1;
PayloadRouter payload_router(modules, {kSsrc1}, kPayloadType, {});
EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, kPayloadType,
encoded_image._timeStamp,
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(0);
EXPECT_NE(
EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
payload_router.SetActive(true);
EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, kPayloadType,
encoded_image._timeStamp,
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(1)
.WillOnce(Return(true));
EXPECT_CALL(rtp, Sending()).WillOnce(Return(true));
EXPECT_EQ(
EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
payload_router.SetActive(false);
EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, kPayloadType,
encoded_image._timeStamp,
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(0);
EXPECT_NE(
EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
payload_router.SetActive(true);
EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, kPayloadType,
encoded_image._timeStamp,
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(1)
.WillOnce(Return(true));
EXPECT_CALL(rtp, Sending()).WillOnce(Return(true));
EXPECT_EQ(
EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
}
TEST(PayloadRouterTest, SendSimulcastSetActive) {
NiceMock<MockRtpRtcp> rtp_1;
NiceMock<MockRtpRtcp> rtp_2;
std::vector<RtpRtcp*> modules = {&rtp_1, &rtp_2};
uint8_t payload = 'a';
EncodedImage encoded_image;
encoded_image._timeStamp = 1;
encoded_image.capture_time_ms_ = 2;
encoded_image._frameType = kVideoFrameKey;
encoded_image._buffer = &payload;
encoded_image._length = 1;
PayloadRouter payload_router(modules, {kSsrc1, kSsrc2}, kPayloadType, {});
CodecSpecificInfo codec_info_1;
memset(&codec_info_1, 0, sizeof(CodecSpecificInfo));
codec_info_1.codecType = kVideoCodecVP8;
codec_info_1.codecSpecific.VP8.simulcastIdx = 0;
payload_router.SetActive(true);
EXPECT_CALL(rtp_1, Sending()).WillOnce(Return(true));
EXPECT_CALL(rtp_1, SendOutgoingData(encoded_image._frameType, kPayloadType,
encoded_image._timeStamp,
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(1)
.WillOnce(Return(true));
EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
EXPECT_EQ(EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, &codec_info_1, nullptr)
.error);
CodecSpecificInfo codec_info_2;
memset(&codec_info_2, 0, sizeof(CodecSpecificInfo));
codec_info_2.codecType = kVideoCodecVP8;
codec_info_2.codecSpecific.VP8.simulcastIdx = 1;
EXPECT_CALL(rtp_2, Sending()).WillOnce(Return(true));
EXPECT_CALL(rtp_2, SendOutgoingData(encoded_image._frameType, kPayloadType,
encoded_image._timeStamp,
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(1)
.WillOnce(Return(true));
EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
EXPECT_EQ(EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, &codec_info_2, nullptr)
.error);
// Inactive.
payload_router.SetActive(false);
EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
EXPECT_NE(EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, &codec_info_1, nullptr)
.error);
EXPECT_NE(EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, &codec_info_2, nullptr)
.error);
}
// Tests how setting individual rtp modules to active affects the overall
// behavior of the payload router. First sets one module to active and checks
// that outgoing data can be sent on this module, and checks that no data can be
// sent if both modules are inactive.
TEST(PayloadRouterTest, SendSimulcastSetActiveModules) {
NiceMock<MockRtpRtcp> rtp_1;
NiceMock<MockRtpRtcp> rtp_2;
std::vector<RtpRtcp*> modules = {&rtp_1, &rtp_2};
uint8_t payload = 'a';
EncodedImage encoded_image;
encoded_image._timeStamp = 1;
encoded_image.capture_time_ms_ = 2;
encoded_image._frameType = kVideoFrameKey;
encoded_image._buffer = &payload;
encoded_image._length = 1;
PayloadRouter payload_router(modules, {kSsrc1, kSsrc2}, kPayloadType, {});
CodecSpecificInfo codec_info_1;
memset(&codec_info_1, 0, sizeof(CodecSpecificInfo));
codec_info_1.codecType = kVideoCodecVP8;
codec_info_1.codecSpecific.VP8.simulcastIdx = 0;
CodecSpecificInfo codec_info_2;
memset(&codec_info_2, 0, sizeof(CodecSpecificInfo));
codec_info_2.codecType = kVideoCodecVP8;
codec_info_2.codecSpecific.VP8.simulcastIdx = 1;
// Setting only one stream to active still sets the payload router to active
// and allows sending data on that stream.
std::vector<bool> active_modules({true, false});
payload_router.SetActiveModules(active_modules);
EXPECT_CALL(rtp_1, Sending()).WillOnce(Return(true));
EXPECT_CALL(rtp_1, SendOutgoingData(encoded_image._frameType, kPayloadType,
encoded_image._timeStamp,
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(1)
.WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, &codec_info_1, nullptr)
.error);
// Setting both streams to inactive will turn the payload router to inactive.
active_modules = {false, false};
payload_router.SetActiveModules(active_modules);
// An incoming encoded image will not ask the module to send outgoing data
// because the payload router is inactive.
EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
EXPECT_CALL(rtp_1, Sending()).Times(0);
EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
EXPECT_CALL(rtp_2, Sending()).Times(0);
EXPECT_NE(EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, &codec_info_1, nullptr)
.error);
EXPECT_NE(EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, &codec_info_2, nullptr)
.error);
}
TEST(PayloadRouterTest, SimulcastTargetBitrate) {
NiceMock<MockRtpRtcp> rtp_1;
NiceMock<MockRtpRtcp> rtp_2;
std::vector<RtpRtcp*> modules = {&rtp_1, &rtp_2};
PayloadRouter payload_router(modules, {kSsrc1, kSsrc2}, kPayloadType, {});
payload_router.SetActive(true);
VideoBitrateAllocation bitrate;
bitrate.SetBitrate(0, 0, 10000);
bitrate.SetBitrate(0, 1, 20000);
bitrate.SetBitrate(1, 0, 40000);
bitrate.SetBitrate(1, 1, 80000);
VideoBitrateAllocation layer0_bitrate;
layer0_bitrate.SetBitrate(0, 0, 10000);
layer0_bitrate.SetBitrate(0, 1, 20000);
VideoBitrateAllocation layer1_bitrate;
layer1_bitrate.SetBitrate(0, 0, 40000);
layer1_bitrate.SetBitrate(0, 1, 80000);
EXPECT_CALL(rtp_1, SetVideoBitrateAllocation(layer0_bitrate)).Times(1);
EXPECT_CALL(rtp_2, SetVideoBitrateAllocation(layer1_bitrate)).Times(1);
payload_router.OnBitrateAllocationUpdated(bitrate);
}
// If the middle of three streams is inactive the first and last streams should
// be asked to send the TargetBitrate message.
TEST(PayloadRouterTest, SimulcastTargetBitrateWithInactiveStream) {
// Set up three active rtp modules.
NiceMock<MockRtpRtcp> rtp_1;
NiceMock<MockRtpRtcp> rtp_2;
NiceMock<MockRtpRtcp> rtp_3;
std::vector<RtpRtcp*> modules = {&rtp_1, &rtp_2, &rtp_3};
PayloadRouter payload_router(modules, {kSsrc1, kSsrc2, kSsrc3}, kPayloadType,
{});
payload_router.SetActive(true);
// Create bitrate allocation with bitrate only for the first and third stream.
VideoBitrateAllocation bitrate;
bitrate.SetBitrate(0, 0, 10000);
bitrate.SetBitrate(0, 1, 20000);
bitrate.SetBitrate(2, 0, 40000);
bitrate.SetBitrate(2, 1, 80000);
VideoBitrateAllocation layer0_bitrate;
layer0_bitrate.SetBitrate(0, 0, 10000);
layer0_bitrate.SetBitrate(0, 1, 20000);
VideoBitrateAllocation layer2_bitrate;
layer2_bitrate.SetBitrate(0, 0, 40000);
layer2_bitrate.SetBitrate(0, 1, 80000);
// Expect the first and third rtp module to be asked to send a TargetBitrate
// message. (No target bitrate with 0bps sent from the second one.)
EXPECT_CALL(rtp_1, SetVideoBitrateAllocation(layer0_bitrate)).Times(1);
EXPECT_CALL(rtp_2, SetVideoBitrateAllocation(_)).Times(0);
EXPECT_CALL(rtp_3, SetVideoBitrateAllocation(layer2_bitrate)).Times(1);
payload_router.OnBitrateAllocationUpdated(bitrate);
}
TEST(PayloadRouterTest, SvcTargetBitrate) {
NiceMock<MockRtpRtcp> rtp_1;
std::vector<RtpRtcp*> modules = {&rtp_1};
PayloadRouter payload_router(modules, {kSsrc1}, kPayloadType, {});
payload_router.SetActive(true);
VideoBitrateAllocation bitrate;
bitrate.SetBitrate(0, 0, 10000);
bitrate.SetBitrate(0, 1, 20000);
bitrate.SetBitrate(1, 0, 40000);
bitrate.SetBitrate(1, 1, 80000);
EXPECT_CALL(rtp_1, SetVideoBitrateAllocation(bitrate)).Times(1);
payload_router.OnBitrateAllocationUpdated(bitrate);
}
TEST(PayloadRouterTest, InfoMappedToRtpVideoHeader_Vp8) {
NiceMock<MockRtpRtcp> rtp1;
NiceMock<MockRtpRtcp> rtp2;
std::vector<RtpRtcp*> modules = {&rtp1, &rtp2};
RtpPayloadState state2;
state2.picture_id = kPictureId;
state2.tl0_pic_idx = kTl0PicIdx;
std::map<uint32_t, RtpPayloadState> states = {{kSsrc2, state2}};
PayloadRouter payload_router(modules, {kSsrc1, kSsrc2}, kPayloadType, states);
payload_router.SetActive(true);
EncodedImage encoded_image;
encoded_image.rotation_ = kVideoRotation_90;
encoded_image.content_type_ = VideoContentType::SCREENSHARE;
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP8;
codec_info.codecSpecific.VP8.simulcastIdx = 1;
codec_info.codecSpecific.VP8.temporalIdx = kTemporalIdx;
codec_info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
codec_info.codecSpecific.VP8.layerSync = true;
codec_info.codecSpecific.VP8.nonReference = true;
EXPECT_CALL(rtp2, Sending()).WillOnce(Return(true));
EXPECT_CALL(rtp2, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(Invoke([](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoRotation_90, header->rotation);
EXPECT_EQ(VideoContentType::SCREENSHARE, header->content_type);
EXPECT_EQ(1, header->simulcastIdx);
EXPECT_EQ(kVideoCodecVP8, header->codec);
EXPECT_EQ(kPictureId + 1, header->vp8().pictureId);
EXPECT_EQ(kTemporalIdx, header->vp8().temporalIdx);
EXPECT_EQ(kTl0PicIdx, header->vp8().tl0PicIdx);
EXPECT_EQ(kNoKeyIdx, header->vp8().keyIdx);
EXPECT_TRUE(header->vp8().layerSync);
EXPECT_TRUE(header->vp8().nonReference);
return true;
}));
EXPECT_EQ(
EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
}
TEST(PayloadRouterTest, InfoMappedToRtpVideoHeader_Vp9) {
RtpPayloadState state;
state.picture_id = kPictureId;
state.tl0_pic_idx = kTl0PicIdx;
std::map<uint32_t, RtpPayloadState> states = {{kSsrc1, state}};
NiceMock<MockRtpRtcp> rtp;
std::vector<RtpRtcp*> modules = {&rtp};
PayloadRouter router(modules, {kSsrc1}, kPayloadType, states);
router.SetActive(true);
EncodedImage encoded_image;
encoded_image.rotation_ = kVideoRotation_90;
encoded_image.content_type_ = VideoContentType::SCREENSHARE;
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP9;
codec_info.codecSpecific.VP9.num_spatial_layers = 3;
codec_info.codecSpecific.VP9.first_frame_in_picture = true;
codec_info.codecSpecific.VP9.spatial_idx = 0;
codec_info.codecSpecific.VP9.temporal_idx = 2;
codec_info.codecSpecific.VP9.end_of_picture = false;
EXPECT_CALL(rtp, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(
Invoke([&codec_info](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoRotation_90, header->rotation);
EXPECT_EQ(VideoContentType::SCREENSHARE, header->content_type);
EXPECT_EQ(kVideoCodecVP9, header->codec);
EXPECT_EQ(kPictureId + 1, header->vp9().picture_id);
EXPECT_EQ(kTl0PicIdx, header->vp9().tl0_pic_idx);
EXPECT_EQ(header->vp9().temporal_idx,
codec_info.codecSpecific.VP9.temporal_idx);
EXPECT_EQ(header->vp9().spatial_idx,
codec_info.codecSpecific.VP9.spatial_idx);
EXPECT_EQ(header->vp9().num_spatial_layers,
codec_info.codecSpecific.VP9.num_spatial_layers);
EXPECT_EQ(header->vp9().end_of_picture,
codec_info.codecSpecific.VP9.end_of_picture);
return true;
}));
EXPECT_CALL(rtp, Sending()).WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
// Next spatial layer.
codec_info.codecSpecific.VP9.first_frame_in_picture = false;
codec_info.codecSpecific.VP9.spatial_idx += 1;
codec_info.codecSpecific.VP9.end_of_picture = true;
EXPECT_CALL(rtp, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(
Invoke([&codec_info](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoRotation_90, header->rotation);
EXPECT_EQ(VideoContentType::SCREENSHARE, header->content_type);
EXPECT_EQ(kVideoCodecVP9, header->codec);
EXPECT_EQ(kPictureId + 1, header->vp9().picture_id);
EXPECT_EQ(kTl0PicIdx, header->vp9().tl0_pic_idx);
EXPECT_EQ(header->vp9().temporal_idx,
codec_info.codecSpecific.VP9.temporal_idx);
EXPECT_EQ(header->vp9().spatial_idx,
codec_info.codecSpecific.VP9.spatial_idx);
EXPECT_EQ(header->vp9().num_spatial_layers,
codec_info.codecSpecific.VP9.num_spatial_layers);
EXPECT_EQ(header->vp9().end_of_picture,
codec_info.codecSpecific.VP9.end_of_picture);
return true;
}));
EXPECT_CALL(rtp, Sending()).WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
}
TEST(PayloadRouterTest, InfoMappedToRtpVideoHeader_H264) {
NiceMock<MockRtpRtcp> rtp1;
std::vector<RtpRtcp*> modules = {&rtp1};
PayloadRouter payload_router(modules, {kSsrc1}, kPayloadType, {});
payload_router.SetActive(true);
EncodedImage encoded_image;
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecH264;
codec_info.codecSpecific.H264.packetization_mode =
H264PacketizationMode::SingleNalUnit;
EXPECT_CALL(rtp1, Sending()).WillOnce(Return(true));
EXPECT_CALL(rtp1, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(Invoke([](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(0, header->simulcastIdx);
EXPECT_EQ(kVideoCodecH264, header->codec);
EXPECT_EQ(H264PacketizationMode::SingleNalUnit,
header->h264().packetization_mode);
return true;
}));
EXPECT_EQ(
EncodedImageCallback::Result::OK,
payload_router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
}
TEST(PayloadRouterTest, CreateWithNoPreviousStates) {
NiceMock<MockRtpRtcp> rtp1;
NiceMock<MockRtpRtcp> rtp2;
std::vector<RtpRtcp*> modules = {&rtp1, &rtp2};
PayloadRouter payload_router(modules, {kSsrc1, kSsrc2}, kPayloadType, {});
payload_router.SetActive(true);
std::map<uint32_t, RtpPayloadState> initial_states =
payload_router.GetRtpPayloadStates();
EXPECT_EQ(2u, initial_states.size());
EXPECT_NE(initial_states.find(kSsrc1), initial_states.end());
EXPECT_NE(initial_states.find(kSsrc2), initial_states.end());
}
TEST(PayloadRouterTest, CreateWithPreviousStates) {
RtpPayloadState state1;
state1.picture_id = kInitialPictureId1;
state1.tl0_pic_idx = kInitialTl0PicIdx1;
RtpPayloadState state2;
state2.picture_id = kInitialPictureId2;
state2.tl0_pic_idx = kInitialTl0PicIdx2;
std::map<uint32_t, RtpPayloadState> states = {{kSsrc1, state1},
{kSsrc2, state2}};
NiceMock<MockRtpRtcp> rtp1;
NiceMock<MockRtpRtcp> rtp2;
std::vector<RtpRtcp*> modules = {&rtp1, &rtp2};
PayloadRouter payload_router(modules, {kSsrc1, kSsrc2}, kPayloadType, states);
payload_router.SetActive(true);
std::map<uint32_t, RtpPayloadState> initial_states =
payload_router.GetRtpPayloadStates();
EXPECT_EQ(2u, initial_states.size());
EXPECT_EQ(kInitialPictureId1, initial_states[kSsrc1].picture_id);
EXPECT_EQ(kInitialTl0PicIdx1, initial_states[kSsrc1].tl0_pic_idx);
EXPECT_EQ(kInitialPictureId2, initial_states[kSsrc2].picture_id);
EXPECT_EQ(kInitialTl0PicIdx2, initial_states[kSsrc2].tl0_pic_idx);
}
TEST(PayloadRouterTest, PictureIdIsSetForVp8) {
RtpPayloadState state1;
state1.picture_id = kInitialPictureId1;
state1.tl0_pic_idx = kInitialTl0PicIdx1;
RtpPayloadState state2;
state2.picture_id = kInitialPictureId2;
state2.tl0_pic_idx = kInitialTl0PicIdx2;
std::map<uint32_t, RtpPayloadState> states = {{kSsrc1, state1},
{kSsrc2, state2}};
NiceMock<MockRtpRtcp> rtp1;
NiceMock<MockRtpRtcp> rtp2;
std::vector<RtpRtcp*> modules = {&rtp1, &rtp2};
PayloadRouter router(modules, {kSsrc1, kSsrc2}, kPayloadType, states);
router.SetActive(true);
EncodedImage encoded_image;
// Modules are sending for this test.
// OnEncodedImage, simulcastIdx: 0.
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP8;
codec_info.codecSpecific.VP8.simulcastIdx = 0;
EXPECT_CALL(rtp1, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(Invoke([](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoCodecVP8, header->codec);
EXPECT_EQ(kInitialPictureId1 + 1, header->vp8().pictureId);
return true;
}));
EXPECT_CALL(rtp1, Sending()).WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
// OnEncodedImage, simulcastIdx: 1.
codec_info.codecSpecific.VP8.simulcastIdx = 1;
EXPECT_CALL(rtp2, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(Invoke([](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoCodecVP8, header->codec);
EXPECT_EQ(kInitialPictureId2 + 1, header->vp8().pictureId);
return true;
}));
EXPECT_CALL(rtp2, Sending()).WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
// State should hold latest used picture id and tl0_pic_idx.
states = router.GetRtpPayloadStates();
EXPECT_EQ(2u, states.size());
EXPECT_EQ(kInitialPictureId1 + 1, states[kSsrc1].picture_id);
EXPECT_EQ(kInitialTl0PicIdx1 + 1, states[kSsrc1].tl0_pic_idx);
EXPECT_EQ(kInitialPictureId2 + 1, states[kSsrc2].picture_id);
EXPECT_EQ(kInitialTl0PicIdx2 + 1, states[kSsrc2].tl0_pic_idx);
}
TEST(PayloadRouterTest, PictureIdWraps) {
RtpPayloadState state1;
state1.picture_id = kMaxTwoBytePictureId;
state1.tl0_pic_idx = kInitialTl0PicIdx1;
NiceMock<MockRtpRtcp> rtp;
std::vector<RtpRtcp*> modules = {&rtp};
PayloadRouter router(modules, {kSsrc1}, kPayloadType, {{kSsrc1, state1}});
router.SetActive(true);
EncodedImage encoded_image;
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP8;
codec_info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
EXPECT_CALL(rtp, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(Invoke([](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoCodecVP8, header->codec);
EXPECT_EQ(0, header->vp8().pictureId);
return true;
}));
EXPECT_CALL(rtp, Sending()).WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
// State should hold latest used picture id and tl0_pic_idx.
std::map<uint32_t, RtpPayloadState> states = router.GetRtpPayloadStates();
EXPECT_EQ(1u, states.size());
EXPECT_EQ(0, states[kSsrc1].picture_id); // Wrapped.
EXPECT_EQ(kInitialTl0PicIdx1, states[kSsrc1].tl0_pic_idx);
}
TEST(PayloadRouterTest, Tl0PicIdxUpdatedForVp8) {
RtpPayloadState state;
state.picture_id = kInitialPictureId1;
state.tl0_pic_idx = kInitialTl0PicIdx1;
std::map<uint32_t, RtpPayloadState> states = {{kSsrc1, state}};
NiceMock<MockRtpRtcp> rtp;
std::vector<RtpRtcp*> modules = {&rtp};
PayloadRouter router(modules, {kSsrc1}, kPayloadType, states);
router.SetActive(true);
EncodedImage encoded_image;
// Modules are sending for this test.
// OnEncodedImage, temporalIdx: 1.
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP8;
codec_info.codecSpecific.VP8.temporalIdx = 1;
EXPECT_CALL(rtp, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(Invoke([](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoCodecVP8, header->codec);
EXPECT_EQ(kInitialPictureId1 + 1, header->vp8().pictureId);
EXPECT_EQ(kInitialTl0PicIdx1, header->vp8().tl0PicIdx);
return true;
}));
EXPECT_CALL(rtp, Sending()).WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
// OnEncodedImage, temporalIdx: 0.
codec_info.codecSpecific.VP8.temporalIdx = 0;
EXPECT_CALL(rtp, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(Invoke([](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoCodecVP8, header->codec);
EXPECT_EQ(kInitialPictureId1 + 2, header->vp8().pictureId);
EXPECT_EQ(kInitialTl0PicIdx1 + 1, header->vp8().tl0PicIdx);
return true;
}));
EXPECT_CALL(rtp, Sending()).WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
// State should hold latest used picture id and tl0_pic_idx.
states = router.GetRtpPayloadStates();
EXPECT_EQ(1u, states.size());
EXPECT_EQ(kInitialPictureId1 + 2, states[kSsrc1].picture_id);
EXPECT_EQ(kInitialTl0PicIdx1 + 1, states[kSsrc1].tl0_pic_idx);
}
TEST(PayloadRouterTest, Tl0PicIdxUpdatedForVp9) {
RtpPayloadState state;
state.picture_id = kInitialPictureId1;
state.tl0_pic_idx = kInitialTl0PicIdx1;
std::map<uint32_t, RtpPayloadState> states = {{kSsrc1, state}};
NiceMock<MockRtpRtcp> rtp;
std::vector<RtpRtcp*> modules = {&rtp};
PayloadRouter router(modules, {kSsrc1}, kPayloadType, states);
router.SetActive(true);
EncodedImage encoded_image;
// Modules are sending for this test.
// OnEncodedImage, temporalIdx: 1.
CodecSpecificInfo codec_info;
memset(&codec_info, 0, sizeof(CodecSpecificInfo));
codec_info.codecType = kVideoCodecVP9;
codec_info.codecSpecific.VP9.temporal_idx = 1;
codec_info.codecSpecific.VP9.first_frame_in_picture = true;
EXPECT_CALL(rtp, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(Invoke([](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoCodecVP9, header->codec);
EXPECT_EQ(kInitialPictureId1 + 1, header->vp9().picture_id);
EXPECT_EQ(kInitialTl0PicIdx1, header->vp9().tl0_pic_idx);
return true;
}));
EXPECT_CALL(rtp, Sending()).WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
// OnEncodedImage, temporalIdx: 0.
codec_info.codecSpecific.VP9.temporal_idx = 0;
EXPECT_CALL(rtp, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(Invoke([](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoCodecVP9, header->codec);
EXPECT_EQ(kInitialPictureId1 + 2, header->vp9().picture_id);
EXPECT_EQ(kInitialTl0PicIdx1 + 1, header->vp9().tl0_pic_idx);
return true;
}));
EXPECT_CALL(rtp, Sending()).WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
// OnEncodedImage, first_frame_in_picture = false
codec_info.codecSpecific.VP9.first_frame_in_picture = false;
EXPECT_CALL(rtp, SendOutgoingData(_, _, _, _, _, _, nullptr, _, _))
.WillOnce(Invoke([](Unused, Unused, Unused, Unused, Unused, Unused,
Unused, const RTPVideoHeader* header, Unused) {
EXPECT_EQ(kVideoCodecVP9, header->codec);
EXPECT_EQ(kInitialPictureId1 + 2, header->vp9().picture_id);
EXPECT_EQ(kInitialTl0PicIdx1 + 1, header->vp9().tl0_pic_idx);
return true;
}));
EXPECT_CALL(rtp, Sending()).WillOnce(Return(true));
EXPECT_EQ(EncodedImageCallback::Result::OK,
router.OnEncodedImage(encoded_image, &codec_info, nullptr).error);
// State should hold latest used picture id and tl0_pic_idx.
states = router.GetRtpPayloadStates();
EXPECT_EQ(1u, states.size());
EXPECT_EQ(kInitialPictureId1 + 2, states[kSsrc1].picture_id);
EXPECT_EQ(kInitialTl0PicIdx1 + 1, states[kSsrc1].tl0_pic_idx);
}
} // namespace webrtc