Make VideoFrameType an enum class, and move to separate file and target

Bug: webrtc:5876, webrtc:6883
Change-Id: I1435cfa9e8e54c4ba2978261048ff3fbb993ce0e
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/126225
Commit-Queue: Niels Möller <nisse@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#27239}
Authored by Niels Möller on 2019-03-21 15:43:58 +01:00; committed by Commit Bot.
parent 3198fa4956
commit 8f7ce222e7
85 changed files with 685 additions and 589 deletions
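
Before the per-file diffs, a short note on what the change amounts to: the old unscoped constants (kEmptyFrame, kVideoFrameKey, kVideoFrameDelta) become enumerators of a scoped enum, so every call site now has to spell out VideoFrameType::..., which is what most of the edits below are. A minimal sketch of what the relocated header presumably looks like follows; the path and the explicit numeric values are assumptions based on the legacy enum, not taken from this page.

// Hypothetical sketch, e.g. api/video/video_frame_type.h (exact path and
// contents assumed, not shown in this diff).
namespace webrtc {

enum class VideoFrameType {
  kEmptyFrame = 0,
  // 1 and 2 are presumed reserved by the audio frame types of the old
  // shared enum, so the video values keep their old numbers.
  kVideoFrameKey = 3,
  kVideoFrameDelta = 4,
};

}  // namespace webrtc

With a scoped enum there are no implicit conversions to int and no unqualified enumerator names, so comparisons and switch statements must name VideoFrameType explicitly, as the updated call sites in the diffs show.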

@@ -212,8 +212,9 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
EXPECT_TRUE(rtp_rtcp_module_->OnSendingRtpFrame(timestamp, timestamp / 90,
kPayloadType, false));
EXPECT_TRUE(rtp_sender_video_->SendVideo(
webrtc::kVideoFrameDelta, kPayloadType, timestamp, timestamp / 90,
payload_data, payload_data_length, nullptr, &video_header, 0));
VideoFrameType::kVideoFrameDelta, kPayloadType, timestamp,
timestamp / 90, payload_data, payload_data_length, nullptr,
&video_header, 0));
// Min required delay until retransmit = 5 + RTT ms (RTT = 0).
fake_clock.AdvanceTimeMilliseconds(5);
int length = BuildNackList(nack_list);
@@ -263,8 +264,9 @@ TEST_F(RtpRtcpRtxNackTest, LongNackList) {
EXPECT_TRUE(rtp_rtcp_module_->OnSendingRtpFrame(timestamp, timestamp / 90,
kPayloadType, false));
EXPECT_TRUE(rtp_sender_video_->SendVideo(
webrtc::kVideoFrameDelta, kPayloadType, timestamp, timestamp / 90,
payload_data, payload_data_length, nullptr, &video_header, 0));
VideoFrameType::kVideoFrameDelta, kPayloadType, timestamp,
timestamp / 90, payload_data, payload_data_length, nullptr,
&video_header, 0));
// Prepare next frame.
timestamp += 3000;
fake_clock.AdvanceTimeMilliseconds(33);

@@ -486,7 +486,7 @@ bool RtpDepacketizerH264::ProcessStapAOrSingleNalu(
nalu_start_offsets.push_back(0);
}
h264_header.nalu_type = nal_type;
parsed_payload->frame_type = kVideoFrameDelta;
parsed_payload->frame_type = VideoFrameType::kVideoFrameDelta;
nalu_start_offsets.push_back(length_ + kLengthFieldSize); // End offset.
for (size_t i = 0; i < nalu_start_offsets.size() - 1; ++i) {
@@ -572,7 +572,7 @@ bool RtpDepacketizerH264::ProcessStapAOrSingleNalu(
} else {
RTC_LOG(LS_WARNING) << "Failed to parse SPS id from SPS slice.";
}
parsed_payload->frame_type = kVideoFrameKey;
parsed_payload->frame_type = VideoFrameType::kVideoFrameKey;
break;
}
case H264::NaluType::kPps: {
@@ -590,7 +590,7 @@ bool RtpDepacketizerH264::ProcessStapAOrSingleNalu(
break;
}
case H264::NaluType::kIdr:
parsed_payload->frame_type = kVideoFrameKey;
parsed_payload->frame_type = VideoFrameType::kVideoFrameKey;
RTC_FALLTHROUGH();
case H264::NaluType::kSlice: {
absl::optional<uint32_t> pps_id = PpsParser::ParsePpsIdFromSlice(
@@ -665,9 +665,9 @@ bool RtpDepacketizerH264::ParseFuaNalu(
}
if (original_nal_type == H264::NaluType::kIdr) {
parsed_payload->frame_type = kVideoFrameKey;
parsed_payload->frame_type = VideoFrameType::kVideoFrameKey;
} else {
parsed_payload->frame_type = kVideoFrameDelta;
parsed_payload->frame_type = VideoFrameType::kVideoFrameDelta;
}
parsed_payload->video_header().width = 0;
parsed_payload->video_header().height = 0;

@@ -608,7 +608,7 @@ TEST_F(RtpDepacketizerH264Test, TestSingleNalu) {
ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
ExpectPacket(&payload, packet, sizeof(packet));
EXPECT_EQ(kVideoFrameKey, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameKey, payload.frame_type);
EXPECT_EQ(kVideoCodecH264, payload.video_header().codec);
EXPECT_TRUE(payload.video_header().is_first_packet_in_frame);
EXPECT_EQ(kH264SingleNalu, payload.h264().packetization_type);
@@ -623,7 +623,7 @@ TEST_F(RtpDepacketizerH264Test, TestSingleNaluSpsWithResolution) {
ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
ExpectPacket(&payload, packet, sizeof(packet));
EXPECT_EQ(kVideoFrameKey, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameKey, payload.frame_type);
EXPECT_EQ(kVideoCodecH264, payload.video_header().codec);
EXPECT_TRUE(payload.video_header().is_first_packet_in_frame);
EXPECT_EQ(kH264SingleNalu, payload.h264().packetization_type);
@@ -652,7 +652,7 @@ TEST_F(RtpDepacketizerH264Test, TestStapAKey) {
H264ParsedPayload payload;
ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
ExpectPacket(&payload, packet, sizeof(packet));
EXPECT_EQ(kVideoFrameKey, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameKey, payload.frame_type);
EXPECT_EQ(kVideoCodecH264, payload.video_header().codec);
EXPECT_TRUE(payload.video_header().is_first_packet_in_frame);
const RTPVideoHeaderH264& h264 = payload.h264();
@@ -683,7 +683,7 @@ TEST_F(RtpDepacketizerH264Test, TestStapANaluSpsWithResolution) {
ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
ExpectPacket(&payload, packet, sizeof(packet));
EXPECT_EQ(kVideoFrameKey, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameKey, payload.frame_type);
EXPECT_EQ(kVideoCodecH264, payload.video_header().codec);
EXPECT_TRUE(payload.video_header().is_first_packet_in_frame);
EXPECT_EQ(kH264StapA, payload.h264().packetization_type);
@@ -810,7 +810,7 @@ TEST_F(RtpDepacketizerH264Test, TestStapADelta) {
ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
ExpectPacket(&payload, packet, sizeof(packet));
EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(kVideoCodecH264, payload.video_header().codec);
EXPECT_TRUE(payload.video_header().is_first_packet_in_frame);
EXPECT_EQ(kH264StapA, payload.h264().packetization_type);
@@ -849,7 +849,7 @@ TEST_F(RtpDepacketizerH264Test, TestFuA) {
// has been replaced by the original nal header.
ASSERT_TRUE(depacketizer_->Parse(&payload, packet1, sizeof(packet1)));
ExpectPacket(&payload, kExpected1, sizeof(kExpected1));
EXPECT_EQ(kVideoFrameKey, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameKey, payload.frame_type);
EXPECT_EQ(kVideoCodecH264, payload.video_header().codec);
EXPECT_TRUE(payload.video_header().is_first_packet_in_frame);
const RTPVideoHeaderH264& h264 = payload.h264();
@@ -865,7 +865,7 @@ TEST_F(RtpDepacketizerH264Test, TestFuA) {
payload = H264ParsedPayload();
ASSERT_TRUE(depacketizer_->Parse(&payload, packet2, sizeof(packet2)));
ExpectPacket(&payload, kExpected2, sizeof(kExpected2));
EXPECT_EQ(kVideoFrameKey, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameKey, payload.frame_type);
EXPECT_EQ(kVideoCodecH264, payload.video_header().codec);
EXPECT_FALSE(payload.video_header().is_first_packet_in_frame);
{
@@ -879,7 +879,7 @@ TEST_F(RtpDepacketizerH264Test, TestFuA) {
payload = H264ParsedPayload();
ASSERT_TRUE(depacketizer_->Parse(&payload, packet3, sizeof(packet3)));
ExpectPacket(&payload, kExpected3, sizeof(kExpected3));
EXPECT_EQ(kVideoFrameKey, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameKey, payload.frame_type);
EXPECT_EQ(kVideoCodecH264, payload.video_header().codec);
EXPECT_FALSE(payload.video_header().is_first_packet_in_frame);
{
@@ -936,7 +936,7 @@ TEST_F(RtpDepacketizerH264Test, TestSeiPacket) {
H264ParsedPayload payload;
ASSERT_TRUE(depacketizer_->Parse(&payload, kPayload, sizeof(kPayload)));
const RTPVideoHeaderH264& h264 = payload.h264();
EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(kH264SingleNalu, h264.packetization_type);
EXPECT_EQ(kSei, h264.nalu_type);
ASSERT_EQ(1u, h264.nalus_length);

@@ -75,7 +75,7 @@ void RtpPacketizerGeneric::BuildHeader(const RTPVideoHeader& rtp_video_header,
VideoFrameType frame_type) {
header_size_ = kGenericHeaderLength;
header_[0] = RtpFormatVideoGeneric::kFirstPacketBit;
if (frame_type == kVideoFrameKey) {
if (frame_type == VideoFrameType::kVideoFrameKey) {
header_[0] |= RtpFormatVideoGeneric::kKeyFrameBit;
}
if (rtp_video_header.generic.has_value()) {
@@ -105,8 +105,8 @@ bool RtpDepacketizerGeneric::Parse(ParsedPayload* parsed_payload,
parsed_payload->frame_type =
((generic_header & RtpFormatVideoGeneric::kKeyFrameBit) != 0)
? kVideoFrameKey
: kVideoFrameDelta;
? VideoFrameType::kVideoFrameKey
: VideoFrameType::kVideoFrameDelta;
parsed_payload->video_header().is_first_packet_in_frame =
(generic_header & RtpFormatVideoGeneric::kFirstPacketBit) != 0;
parsed_payload->video_header().codec = kVideoCodecGeneric;

@@ -49,7 +49,7 @@ TEST(RtpPacketizerVideoGeneric, RespectsMaxPayloadSize) {
RtpPacketizer::PayloadSizeLimits limits;
limits.max_payload_len = 6;
RtpPacketizerGeneric packetizer(kPayload, limits, RTPVideoHeader(),
kVideoFrameKey);
VideoFrameType::kVideoFrameKey);
std::vector<int> payload_sizes = NextPacketFillPayloadSizes(&packetizer);
@@ -63,7 +63,7 @@ TEST(RtpPacketizerVideoGeneric, UsesMaxPayloadSize) {
RtpPacketizer::PayloadSizeLimits limits;
limits.max_payload_len = 6;
RtpPacketizerGeneric packetizer(kPayload, limits, RTPVideoHeader(),
kVideoFrameKey);
VideoFrameType::kVideoFrameKey);
std::vector<int> payload_sizes = NextPacketFillPayloadSizes(&packetizer);
@@ -79,7 +79,7 @@ TEST(RtpPacketizerVideoGeneric, WritesExtendedHeaderWhenPictureIdIsSet) {
RTPVideoHeader rtp_video_header;
rtp_video_header.generic.emplace().frame_id = 37;
RtpPacketizerGeneric packetizer(kPayload, kNoSizeLimits, rtp_video_header,
kVideoFrameKey);
VideoFrameType::kVideoFrameKey);
RtpPacketToSend packet(nullptr);
ASSERT_TRUE(packetizer.NextPacket(&packet));
@@ -101,7 +101,7 @@ TEST(RtpPacketizerVideoGeneric, RespectsMaxPayloadSizeWithExtendedHeader) {
RTPVideoHeader rtp_video_header;
rtp_video_header.generic.emplace().frame_id = 37;
RtpPacketizerGeneric packetizer(kPayload, limits, rtp_video_header,
kVideoFrameKey);
VideoFrameType::kVideoFrameKey);
std::vector<int> payload_sizes = NextPacketFillPayloadSizes(&packetizer);
@@ -117,7 +117,7 @@ TEST(RtpPacketizerVideoGeneric, UsesMaxPayloadSizeWithExtendedHeader) {
RTPVideoHeader rtp_video_header;
rtp_video_header.generic.emplace().frame_id = 37;
RtpPacketizerGeneric packetizer(kPayload, limits, rtp_video_header,
kVideoFrameKey);
VideoFrameType::kVideoFrameKey);
std::vector<int> payload_sizes = NextPacketFillPayloadSizes(&packetizer);
// With kPayloadSize > max_payload_len^2, there should be packets that use
@@ -132,7 +132,7 @@ TEST(RtpPacketizerVideoGeneric, FrameIdOver15bitsWrapsAround) {
RTPVideoHeader rtp_video_header;
rtp_video_header.generic.emplace().frame_id = 0x8137;
RtpPacketizerGeneric packetizer(kPayload, kNoSizeLimits, rtp_video_header,
kVideoFrameKey);
VideoFrameType::kVideoFrameKey);
RtpPacketToSend packet(nullptr);
ASSERT_TRUE(packetizer.NextPacket(&packet));
@@ -149,7 +149,7 @@ TEST(RtpPacketizerVideoGeneric, NoFrameIdDoesNotWriteExtendedHeader) {
const uint8_t kPayload[kPayloadSize] = {};
RtpPacketizerGeneric packetizer(kPayload, kNoSizeLimits, RTPVideoHeader(),
kVideoFrameKey);
VideoFrameType::kVideoFrameKey);
RtpPacketToSend packet(nullptr);
ASSERT_TRUE(packetizer.NextPacket(&packet));

@@ -131,7 +131,7 @@ int ParseVP8Extension(RTPVideoHeaderVP8* vp8,
int ParseVP8FrameSize(RtpDepacketizer::ParsedPayload* parsed_payload,
const uint8_t* data,
size_t data_length) {
if (parsed_payload->frame_type != kVideoFrameKey) {
if (parsed_payload->frame_type != VideoFrameType::kVideoFrameKey) {
// Included in payload header for I-frames.
return 0;
}
@@ -357,10 +357,11 @@ bool RtpDepacketizerVp8::Parse(ParsedPayload* parsed_payload,
// Read P bit from payload header (only at beginning of first partition).
if (beginning_of_partition && partition_id == 0) {
parsed_payload->frame_type =
(*payload_data & 0x01) ? kVideoFrameDelta : kVideoFrameKey;
parsed_payload->frame_type = (*payload_data & 0x01)
? VideoFrameType::kVideoFrameDelta
: VideoFrameType::kVideoFrameKey;
} else {
parsed_payload->frame_type = kVideoFrameDelta;
parsed_payload->frame_type = VideoFrameType::kVideoFrameDelta;
}
if (ParseVP8FrameSize(parsed_payload, payload_data, payload_data_length) !=

@@ -198,7 +198,7 @@ TEST_F(RtpDepacketizerVp8Test, BasicHeader) {
ExpectPacket(&payload, packet + kHeaderLength,
sizeof(packet) - kHeaderLength);
EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(kVideoCodecVP8, payload.video_header().codec);
VerifyBasicHeader(&payload.video_header(), 0, 1, 4);
VerifyExtensions(&payload.video_header(), kNoPictureId, kNoTl0PicIdx,
@@ -218,7 +218,7 @@ TEST_F(RtpDepacketizerVp8Test, PictureID) {
ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
ExpectPacket(&payload, packet + kHeaderLength1,
sizeof(packet) - kHeaderLength1);
EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(kVideoCodecVP8, payload.video_header().codec);
VerifyBasicHeader(&payload.video_header(), 1, 0, 0);
VerifyExtensions(&payload.video_header(), kPictureId, kNoTl0PicIdx,
@@ -249,7 +249,7 @@ TEST_F(RtpDepacketizerVp8Test, Tl0PicIdx) {
ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
ExpectPacket(&payload, packet + kHeaderLength,
sizeof(packet) - kHeaderLength);
EXPECT_EQ(kVideoFrameKey, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameKey, payload.frame_type);
EXPECT_EQ(kVideoCodecVP8, payload.video_header().codec);
VerifyBasicHeader(&payload.video_header(), 0, 1, 0);
VerifyExtensions(&payload.video_header(), kNoPictureId, kTl0PicIdx,
@@ -267,7 +267,7 @@ TEST_F(RtpDepacketizerVp8Test, TIDAndLayerSync) {
ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
ExpectPacket(&payload, packet + kHeaderLength,
sizeof(packet) - kHeaderLength);
EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(kVideoCodecVP8, payload.video_header().codec);
VerifyBasicHeader(&payload.video_header(), 0, 0, 8);
VerifyExtensions(&payload.video_header(), kNoPictureId, kNoTl0PicIdx, 2,
@@ -289,7 +289,7 @@ TEST_F(RtpDepacketizerVp8Test, KeyIdx) {
ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
ExpectPacket(&payload, packet + kHeaderLength,
sizeof(packet) - kHeaderLength);
EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(kVideoCodecVP8, payload.video_header().codec);
VerifyBasicHeader(&payload.video_header(), 0, 0, 8);
VerifyExtensions(&payload.video_header(), kNoPictureId, kNoTl0PicIdx,
@@ -310,7 +310,7 @@ TEST_F(RtpDepacketizerVp8Test, MultipleExtensions) {
ASSERT_TRUE(depacketizer_->Parse(&payload, packet, sizeof(packet)));
ExpectPacket(&payload, packet + kHeaderLength,
sizeof(packet) - kHeaderLength);
EXPECT_EQ(kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameDelta, payload.frame_type);
EXPECT_EQ(kVideoCodecVP8, payload.video_header().codec);
VerifyBasicHeader(&payload.video_header(), 0, 0, 8);
VerifyExtensions(&payload.video_header(), (17 << 8) + 17, 42, 1, 17);
@@ -351,7 +351,7 @@ TEST_F(RtpDepacketizerVp8Test, TestWithPacketizer) {
depacketizer_->Parse(&payload, rtp_payload.data(), rtp_payload.size()));
auto vp8_payload = rtp_payload.subview(kHeaderLength);
ExpectPacket(&payload, vp8_payload.data(), vp8_payload.size());
EXPECT_EQ(kVideoFrameKey, payload.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameKey, payload.frame_type);
EXPECT_EQ(kVideoCodecVP8, payload.video_header().codec);
VerifyBasicHeader(&payload.video_header(), 1, 1, 0);
VerifyExtensions(&payload.video_header(), input_header.pictureId,

@@ -608,7 +608,8 @@ bool RtpDepacketizerVp9::Parse(ParsedPayload* parsed_payload,
parsed_payload->video_header().simulcastIdx = 0;
parsed_payload->video_header().codec = kVideoCodecVP9;
parsed_payload->frame_type = p_bit ? kVideoFrameDelta : kVideoFrameKey;
parsed_payload->frame_type =
p_bit ? VideoFrameType::kVideoFrameDelta : VideoFrameType::kVideoFrameKey;
auto& vp9_header = parsed_payload->video_header()
.video_type_header.emplace<RTPVideoHeaderVP9>();

@@ -749,7 +749,7 @@ TEST_F(RtpDepacketizerVp9Test, ParseFirstPacketInKeyFrame) {
RtpDepacketizer::ParsedPayload parsed;
ASSERT_TRUE(depacketizer_->Parse(&parsed, packet, sizeof(packet)));
EXPECT_EQ(kVideoFrameKey, parsed.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameKey, parsed.frame_type);
EXPECT_TRUE(parsed.video_header().is_first_packet_in_frame);
}
@@ -759,7 +759,7 @@ TEST_F(RtpDepacketizerVp9Test, ParseLastPacketInDeltaFrame) {
RtpDepacketizer::ParsedPayload parsed;
ASSERT_TRUE(depacketizer_->Parse(&parsed, packet, sizeof(packet)));
EXPECT_EQ(kVideoFrameDelta, parsed.frame_type);
EXPECT_EQ(VideoFrameType::kVideoFrameDelta, parsed.frame_type);
EXPECT_FALSE(parsed.video_header().is_first_packet_in_frame);
}

@@ -231,9 +231,9 @@ class RtpRtcpImplTest : public ::testing::Test {
const uint8_t payload[100] = {0};
EXPECT_TRUE(module->impl_->OnSendingRtpFrame(0, 0, codec_.plType, true));
EXPECT_TRUE(sender->SendVideo(kVideoFrameKey, codec_.plType, 0, 0, payload,
sizeof(payload), nullptr, &rtp_video_header,
0));
EXPECT_TRUE(sender->SendVideo(VideoFrameType::kVideoFrameKey, codec_.plType,
0, 0, payload, sizeof(payload), nullptr,
&rtp_video_header, 0));
}
void IncomingRtcpNack(const RtpRtcpModule* module, uint16_t sequence_number) {

@@ -520,7 +520,7 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) {
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
fake_clock_.AdvanceTimeMilliseconds(10);
EXPECT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, kPayloadType,
VideoFrameType::kVideoFrameKey, kPayloadType,
capture_time_ms * kCaptureTimeMsToRtpTimestamp, capture_time_ms,
kPayloadData, sizeof(kPayloadData), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
@@ -531,7 +531,7 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) {
.Times(1);
fake_clock_.AdvanceTimeMilliseconds(10);
EXPECT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, kPayloadType,
VideoFrameType::kVideoFrameKey, kPayloadType,
capture_time_ms * kCaptureTimeMsToRtpTimestamp, capture_time_ms,
kPayloadData, sizeof(kPayloadData), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
@@ -543,7 +543,7 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) {
.Times(1);
capture_time_ms = fake_clock_.TimeInMilliseconds();
EXPECT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, kPayloadType,
VideoFrameType::kVideoFrameKey, kPayloadType,
capture_time_ms * kCaptureTimeMsToRtpTimestamp, capture_time_ms,
kPayloadData, sizeof(kPayloadData), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
@@ -556,7 +556,7 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) {
EXPECT_CALL(send_side_delay_observer_, SendSideDelayUpdated(1, 1, kSsrc))
.Times(1);
EXPECT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, kPayloadType,
VideoFrameType::kVideoFrameKey, kPayloadType,
capture_time_ms * kCaptureTimeMsToRtpTimestamp, capture_time_ms,
kPayloadData, sizeof(kPayloadData), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
@@ -1078,8 +1078,9 @@ TEST_P(RtpSenderTestWithoutPacer, SendGenericVideo) {
// Send keyframe
RTPVideoHeader video_header;
ASSERT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, payload_type, 1234, 4321, payload, sizeof(payload),
nullptr, &video_header, kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameKey, payload_type, 1234, 4321, payload,
sizeof(payload), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
auto sent_payload = transport_.last_sent_packet().payload();
uint8_t generic_header = sent_payload[0];
@@ -1093,8 +1094,9 @@ TEST_P(RtpSenderTestWithoutPacer, SendGenericVideo) {
payload[4] = 13;
ASSERT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameDelta, payload_type, 1234, 4321, payload, sizeof(payload),
nullptr, &video_header, kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameDelta, payload_type, 1234, 4321, payload,
sizeof(payload), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
sent_payload = transport_.last_sent_packet().payload();
generic_header = sent_payload[0];
@@ -1148,7 +1150,7 @@ TEST_P(RtpSenderTest, SendFlexfecPackets) {
RTPVideoHeader video_header;
EXPECT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, kMediaPayloadType, kTimestamp,
VideoFrameType::kVideoFrameKey, kMediaPayloadType, kTimestamp,
fake_clock_.TimeInMilliseconds(), kPayloadData, sizeof(kPayloadData),
nullptr, &video_header, kDefaultExpectedRetransmissionTimeMs));
@@ -1226,9 +1228,9 @@ TEST_P(RtpSenderTest, NoFlexfecForTimingFrames) {
RTPVideoHeader video_header;
video_header.video_timing.flags = VideoSendTiming::kTriggeredByTimer;
EXPECT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, kMediaPayloadType, kTimestamp, kCaptureTimeMs,
kPayloadData, sizeof(kPayloadData), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameKey, kMediaPayloadType, kTimestamp,
kCaptureTimeMs, kPayloadData, sizeof(kPayloadData), nullptr,
&video_header, kDefaultExpectedRetransmissionTimeMs));
EXPECT_CALL(mock_rtc_event_log_,
LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing)))
@@ -1252,9 +1254,9 @@ TEST_P(RtpSenderTest, NoFlexfecForTimingFrames) {
kSeqNum + 1, _, _, false));
video_header.video_timing.flags = VideoSendTiming::kInvalid;
EXPECT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, kMediaPayloadType, kTimestamp + 1, kCaptureTimeMs + 1,
kPayloadData, sizeof(kPayloadData), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameKey, kMediaPayloadType, kTimestamp + 1,
kCaptureTimeMs + 1, kPayloadData, sizeof(kPayloadData), nullptr,
&video_header, kDefaultExpectedRetransmissionTimeMs));
EXPECT_CALL(mock_rtc_event_log_,
LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing)))
@@ -1315,7 +1317,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendFlexfecPackets) {
.Times(2);
RTPVideoHeader video_header;
EXPECT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, kMediaPayloadType, kTimestamp,
VideoFrameType::kVideoFrameKey, kMediaPayloadType, kTimestamp,
fake_clock_.TimeInMilliseconds(), kPayloadData, sizeof(kPayloadData),
nullptr, &video_header, kDefaultExpectedRetransmissionTimeMs));
@@ -1448,7 +1450,7 @@ TEST_P(RtpSenderTest, FecOverheadRate) {
RTPVideoHeader video_header;
EXPECT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, kMediaPayloadType, kTimestamp,
VideoFrameType::kVideoFrameKey, kMediaPayloadType, kTimestamp,
fake_clock_.TimeInMilliseconds(), kPayloadData, sizeof(kPayloadData),
nullptr, &video_header, kDefaultExpectedRetransmissionTimeMs));
@@ -1526,8 +1528,9 @@ TEST_P(RtpSenderTest, BitrateCallbacks) {
RTPVideoHeader video_header;
for (uint32_t i = 0; i < kNumPackets; ++i) {
ASSERT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, payload_type, 1234, 4321, payload, sizeof(payload),
nullptr, &video_header, kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameKey, payload_type, 1234, 4321, payload,
sizeof(payload), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
fake_clock_.AdvanceTimeMilliseconds(kPacketInterval);
}
@@ -1598,8 +1601,9 @@ TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) {
// Send a frame.
RTPVideoHeader video_header;
ASSERT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameKey, payload_type, 1234, 4321, payload, sizeof(payload),
nullptr, &video_header, kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameKey, payload_type, 1234, 4321, payload,
sizeof(payload), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
StreamDataCounters expected;
expected.transmitted.payload_bytes = 6;
expected.transmitted.header_bytes = 12;
@@ -1640,8 +1644,9 @@ TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) {
fec_params.max_fec_frames = 1;
rtp_sender_video.SetFecParameters(fec_params, fec_params);
ASSERT_TRUE(rtp_sender_video.SendVideo(
kVideoFrameDelta, payload_type, 1234, 4321, payload, sizeof(payload),
nullptr, &video_header, kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameDelta, payload_type, 1234, 4321, payload,
sizeof(payload), nullptr, &video_header,
kDefaultExpectedRetransmissionTimeMs));
expected.transmitted.payload_bytes = 40;
expected.transmitted.header_bytes = 60;
expected.transmitted.packets = 5;

@@ -72,7 +72,7 @@ void AddRtpHeaderExtensions(const RTPVideoHeader& video_header,
packet->SetExtension<VideoOrientation>(video_header.rotation);
// Report content type only for key frames.
if (last_packet && frame_type == kVideoFrameKey &&
if (last_packet && frame_type == VideoFrameType::kVideoFrameKey &&
video_header.content_type != VideoContentType::UNSPECIFIED)
packet->SetExtension<VideoContentTypeExtension>(video_header.content_type);
@@ -116,7 +116,7 @@ void AddRtpHeaderExtensions(const RTPVideoHeader& video_header,
generic_descriptor.SetTemporalLayer(video_header.generic->temporal_index);
if (frame_type == kVideoFrameKey) {
if (frame_type == VideoFrameType::kVideoFrameKey) {
generic_descriptor.SetResolution(video_header.width,
video_header.height);
}
@@ -168,11 +168,11 @@ bool IsBaseLayer(const RTPVideoHeader& video_header) {
const char* FrameTypeToString(VideoFrameType frame_type) {
switch (frame_type) {
case kEmptyFrame:
case VideoFrameType::kEmptyFrame:
return "empty";
case kVideoFrameKey:
case VideoFrameType::kVideoFrameKey:
return "video_key";
case kVideoFrameDelta:
case VideoFrameType::kVideoFrameDelta:
return "video_delta";
default:
RTC_NOTREACHED();
@@ -429,13 +429,10 @@ bool RTPSenderVideo::SendVideo(VideoFrameType frame_type,
const RTPFragmentationHeader* fragmentation,
const RTPVideoHeader* video_header,
int64_t expected_retransmission_time_ms) {
RTC_DCHECK(frame_type == kVideoFrameKey || frame_type == kVideoFrameDelta ||
frame_type == kEmptyFrame);
TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, "Send", "type",
FrameTypeToString(frame_type));
if (frame_type == kEmptyFrame)
if (frame_type == VideoFrameType::kEmptyFrame)
return true;
if (payload_size == 0)
@@ -466,7 +463,7 @@ bool RTPSenderVideo::SendVideo(VideoFrameType frame_type,
// value sent.
// Set rotation when key frame or when changed (to follow standard).
// Or when different from 0 (to follow current receiver implementation).
set_video_rotation = frame_type == kVideoFrameKey ||
set_video_rotation = frame_type == VideoFrameType::kVideoFrameKey ||
video_header->rotation != last_rotation_ ||
video_header->rotation != kVideoRotation_0;
last_rotation_ = video_header->rotation;
@@ -479,8 +476,8 @@ bool RTPSenderVideo::SendVideo(VideoFrameType frame_type,
set_color_space = true;
transmit_color_space_next_frame_ = !IsBaseLayer(*video_header);
} else {
set_color_space =
frame_type == kVideoFrameKey || transmit_color_space_next_frame_;
set_color_space = frame_type == VideoFrameType::kVideoFrameKey ||
transmit_color_space_next_frame_;
transmit_color_space_next_frame_ = transmit_color_space_next_frame_
? !IsBaseLayer(*video_header)
: false;
@@ -488,7 +485,8 @@ bool RTPSenderVideo::SendVideo(VideoFrameType frame_type,
// FEC settings.
const FecProtectionParams& fec_params =
frame_type == kVideoFrameKey ? key_fec_params_ : delta_fec_params_;
frame_type == VideoFrameType::kVideoFrameKey ? key_fec_params_
: delta_fec_params_;
if (flexfec_enabled())
flexfec_sender_->SetFecParameters(fec_params);
if (ulpfec_enabled())

@@ -188,9 +188,9 @@ TEST_P(RtpSenderVideoTest, KeyFrameHasCVO) {
RTPVideoHeader hdr;
hdr.rotation = kVideoRotation_0;
rtp_sender_video_.SendVideo(kVideoFrameKey, kPayload, kTimestamp, 0, kFrame,
sizeof(kFrame), nullptr, &hdr,
kDefaultExpectedRetransmissionTimeMs);
rtp_sender_video_.SendVideo(VideoFrameType::kVideoFrameKey, kPayload,
kTimestamp, 0, kFrame, sizeof(kFrame), nullptr,
&hdr, kDefaultExpectedRetransmissionTimeMs);
VideoRotation rotation;
EXPECT_TRUE(
@@ -214,9 +214,10 @@ TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) {
hdr.video_timing.encode_finish_delta_ms = kEncodeFinishDeltaMs;
fake_clock_.AdvanceTimeMilliseconds(kPacketizationTimeMs);
rtp_sender_video_.SendVideo(
kVideoFrameKey, kPayload, kTimestamp, kCaptureTimestamp, kFrame,
sizeof(kFrame), nullptr, &hdr, kDefaultExpectedRetransmissionTimeMs);
rtp_sender_video_.SendVideo(VideoFrameType::kVideoFrameKey, kPayload,
kTimestamp, kCaptureTimestamp, kFrame,
sizeof(kFrame), nullptr, &hdr,
kDefaultExpectedRetransmissionTimeMs);
VideoSendTiming timing;
EXPECT_TRUE(transport_.last_sent_packet().GetExtension<VideoTimingExtension>(
&timing));
@@ -233,13 +234,13 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) {
RTPVideoHeader hdr;
hdr.rotation = kVideoRotation_90;
EXPECT_TRUE(rtp_sender_video_.SendVideo(
kVideoFrameKey, kPayload, kTimestamp, 0, kFrame, sizeof(kFrame), nullptr,
&hdr, kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameKey, kPayload, kTimestamp, 0, kFrame,
sizeof(kFrame), nullptr, &hdr, kDefaultExpectedRetransmissionTimeMs));
hdr.rotation = kVideoRotation_0;
EXPECT_TRUE(rtp_sender_video_.SendVideo(
kVideoFrameDelta, kPayload, kTimestamp + 1, 0, kFrame, sizeof(kFrame),
nullptr, &hdr, kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameDelta, kPayload, kTimestamp + 1, 0, kFrame,
sizeof(kFrame), nullptr, &hdr, kDefaultExpectedRetransmissionTimeMs));
VideoRotation rotation;
EXPECT_TRUE(
@@ -255,12 +256,12 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) {
RTPVideoHeader hdr;
hdr.rotation = kVideoRotation_90;
EXPECT_TRUE(rtp_sender_video_.SendVideo(
kVideoFrameKey, kPayload, kTimestamp, 0, kFrame, sizeof(kFrame), nullptr,
&hdr, kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameKey, kPayload, kTimestamp, 0, kFrame,
sizeof(kFrame), nullptr, &hdr, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.SendVideo(
kVideoFrameDelta, kPayload, kTimestamp + 1, 0, kFrame, sizeof(kFrame),
nullptr, &hdr, kDefaultExpectedRetransmissionTimeMs));
VideoFrameType::kVideoFrameDelta, kPayload, kTimestamp + 1, 0, kFrame,
sizeof(kFrame), nullptr, &hdr, kDefaultExpectedRetransmissionTimeMs));
VideoRotation rotation;
EXPECT_TRUE(
@@ -285,18 +286,18 @@ TEST_P(RtpSenderVideoTest, CheckH264FrameMarking) {
hdr.frame_marking.temporal_id = kNoTemporalIdx;
hdr.frame_marking.tl0_pic_idx = 99;
hdr.frame_marking.base_layer_sync = true;
rtp_sender_video_.SendVideo(kVideoFrameDelta, kPayload,
kTimestamp, 0, kFrame, sizeof(kFrame), &frag,
&hdr, kDefaultExpectedRetransmissionTimeMs);
rtp_sender_video_.SendVideo(VideoFrameType::kVideoFrameDelta, kPayload,
kTimestamp, 0, kFrame, sizeof(kFrame), &frag,
&hdr, kDefaultExpectedRetransmissionTimeMs);
FrameMarking fm;
EXPECT_FALSE(
transport_.last_sent_packet().GetExtension<FrameMarkingExtension>(&fm));
hdr.frame_marking.temporal_id = 0;
rtp_sender_video_.SendVideo(kVideoFrameDelta, kPayload,
kTimestamp + 1, 0, kFrame, sizeof(kFrame), &frag,
&hdr, kDefaultExpectedRetransmissionTimeMs);
rtp_sender_video_.SendVideo(VideoFrameType::kVideoFrameDelta, kPayload,
kTimestamp + 1, 0, kFrame, sizeof(kFrame), &frag,
&hdr, kDefaultExpectedRetransmissionTimeMs);
EXPECT_TRUE(
transport_.last_sent_packet().GetExtension<FrameMarkingExtension>(&fm));
@@ -563,9 +564,9 @@ void RtpSenderVideoTest::PopulateGenericFrameDescriptor(int version) {
generic.higher_spatial_layers.push_back(4);
generic.dependencies.push_back(kFrameId - 1);
generic.dependencies.push_back(kFrameId - 500);
rtp_sender_video_.SendVideo(kVideoFrameDelta, kPayload, kTimestamp, 0, kFrame,
sizeof(kFrame), nullptr, &hdr,
kDefaultExpectedRetransmissionTimeMs);
rtp_sender_video_.SendVideo(VideoFrameType::kVideoFrameDelta, kPayload,
kTimestamp, 0, kFrame, sizeof(kFrame), nullptr,
&hdr, kDefaultExpectedRetransmissionTimeMs);
RtpGenericFrameDescriptor descriptor_wire;
EXPECT_EQ(1, transport_.packets_sent());
@@ -618,9 +619,9 @@ void RtpSenderVideoTest::
RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace();
generic.frame_id = kFrameId;
rtp_sender_video_.RegisterPayloadType(kPayload, "vp8");
rtp_sender_video_.SendVideo(kVideoFrameDelta, kPayload, kTimestamp, 0, kFrame,
sizeof(kFrame), nullptr, &hdr,
kDefaultExpectedRetransmissionTimeMs);
rtp_sender_video_.SendVideo(VideoFrameType::kVideoFrameDelta, kPayload,
kTimestamp, 0, kFrame, sizeof(kFrame), nullptr,
&hdr, kDefaultExpectedRetransmissionTimeMs);
ASSERT_EQ(transport_.packets_sent(), 1);
// Expect only minimal 1-byte vp8 descriptor was generated.