Add a flags field to video timing extension.

The RTP header extension for video timing should have an additional
field for signaling metadata, such as what triggered the extension for
this particular frame. This allows frames selected because of outlier
sizes to be separated from regular frames, for more accurate stats.

This implementation is backwards compatible: it can read video timing
extensions without the new flags field, but it always sends the
extension with the field included.
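
As an illustration (not part of this change), a minimal sketch of how a
stats consumer could use the new flags to keep size-triggered outliers
out of the regular delay stats. The ReportTimingFrame and Record*
helpers below are hypothetical; only the TimingFrameInfo accessors are
added by this patch:

#include "webrtc/api/video/video_timing.h"

// Hypothetical stat sinks, assumed to exist elsewhere.
void RecordOutlierDelayMs(int64_t delay_ms);
void RecordSampledDelayMs(int64_t delay_ms);

// Since |flags| is a bitmask, a frame may carry both trigger bits;
// outliers take precedence here so they never skew the periodic samples.
void ReportTimingFrame(const webrtc::TimingFrameInfo& info) {
  if (info.IsInvalid())
    return;  // No usable timing data; ignore this frame.
  if (info.IsOutlier()) {
    RecordOutlierDelayMs(info.EndToEndDelay());
  } else if (info.IsTimerTriggered()) {
    RecordSampledDelayMs(info.EndToEndDelay());
  }
}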

BUG=webrtc:7594

Review-Url: https://codereview.webrtc.org/3000753002
Cr-Commit-Position: refs/heads/master@{#19353}
Author: sprang
Date: 2017-08-15 05:33:27 -07:00
Committed by: Commit Bot
parent 892dab52b6
commit cf5d485e14
26 changed files with 211 additions and 83 deletions

View File

@ -27,7 +27,8 @@ TimingFrameInfo::TimingFrameInfo()
receive_finish_ms(-1),
decode_start_ms(-1),
decode_finish_ms(-1),
render_time_ms(-1) {}
render_time_ms(-1),
flags(TimingFrameFlags::kDefault) {}
int64_t TimingFrameInfo::EndToEndDelay() const {
return capture_time_ms >= 0 ? decode_finish_ms - capture_time_ms : -1;
@ -38,14 +39,31 @@ bool TimingFrameInfo::IsLongerThan(const TimingFrameInfo& other) const {
return other_delay == -1 || EndToEndDelay() > other_delay;
}
bool TimingFrameInfo::IsOutlier() const {
return !IsInvalid() && (flags & TimingFrameFlags::kTriggeredBySize);
}
bool TimingFrameInfo::IsTimerTriggered() const {
return !IsInvalid() && (flags & TimingFrameFlags::kTriggeredByTimer);
}
bool TimingFrameInfo::IsInvalid() const {
return flags == TimingFrameFlags::kInvalid;
}
std::string TimingFrameInfo::ToString() const {
std::stringstream out;
if (IsInvalid()) {
out << "[Invalid]";
} else {
out << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms
<< ',' << encode_finish_ms << ',' << packetization_finish_ms << ','
<< pacer_exit_ms << ',' << network_timestamp_ms << ','
<< network2_timestamp_ms << ',' << receive_start_ms << ','
<< receive_finish_ms << ',' << decode_start_ms << ',' << decode_finish_ms
<< ',' << render_time_ms;
<< receive_finish_ms << ',' << decode_start_ms << ','
<< decode_finish_ms << ',' << render_time_ms << ", outlier_triggered "
<< IsOutlier() << ", timer_triggered " << IsTimerTriggered();
}
return out.str();
}
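
For reference, with the new fields a valid TimingFrameInfo now renders
as the thirteen comma-separated timestamps followed by the two trigger
booleans; a sketch of the output with made-up values:

3000,100,105,110,112,115,0,0,130,131,132,140,150, outlier_triggered 0, timer_triggered 1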

View File

@ -13,6 +13,7 @@
#include <stdint.h>
#include <limits>
#include <string>
#include "webrtc/rtc_base/checks.h"
@ -20,15 +21,25 @@
namespace webrtc {
enum TimingFrameFlags : uint8_t {
kDefault = 0, // No flags set (used by old protocol)
kTriggeredByTimer = 1 << 0, // Frame marked for tracing by periodic timer.
kTriggeredBySize = 1 << 1, // Frame marked for tracing due to size.
kInvalid = std::numeric_limits<uint8_t>::max() // Invalid, ignore!
};
// Video timing timestamps in ms counted from capture_time_ms of a frame.
// This structure represents data sent in video-timing RTP header extension.
struct VideoSendTiming {
static const uint8_t kEncodeStartDeltaIdx = 0;
static const uint8_t kEncodeFinishDeltaIdx = 1;
static const uint8_t kPacketizationFinishDeltaIdx = 2;
static const uint8_t kPacerExitDeltaIdx = 3;
static const uint8_t kNetworkTimestampDeltaIdx = 4;
static const uint8_t kNetwork2TimestampDeltaIdx = 5;
// Offsets of the fields in the RTP header extension, counting from the first
// byte after the one-byte header.
static constexpr uint8_t kFlagsOffset = 0;
static constexpr uint8_t kEncodeStartDeltaOffset = 1;
static constexpr uint8_t kEncodeFinishDeltaOffset = 3;
static constexpr uint8_t kPacketizationFinishDeltaOffset = 5;
static constexpr uint8_t kPacerExitDeltaOffset = 7;
static constexpr uint8_t kNetworkTimestampDeltaOffset = 9;
static constexpr uint8_t kNetwork2TimestampDeltaOffset = 11;
// Returns |time_ms - base_ms| capped at max 16-bit value.
// Used to fill this data structure as per
@ -45,7 +56,7 @@ struct VideoSendTiming {
uint16_t pacer_exit_delta_ms;
uint16_t network_timstamp_delta_ms;
uint16_t network2_timstamp_delta_ms;
bool is_timing_frame;
uint8_t flags;
};
// Used to report precise timings of 'timing frames'. Contains all important
@ -64,6 +75,18 @@ struct TimingFrameInfo {
// preferred.
bool IsLongerThan(const TimingFrameInfo& other) const;
// Returns true if flags are set to indicate this frame was marked for tracing
// due to the size being outside some limit.
bool IsOutlier() const;
// Returns true if flags are set to indicate this frame was marked for tracing
// due to the cyclic timer.
bool IsTimerTriggered() const;
// Returns true if the timing data is marked as invalid, in which case it
// should be ignored.
bool IsInvalid() const;
std::string ToString() const;
uint32_t rtp_timestamp; // Identifier of a frame.
@ -84,6 +107,8 @@ struct TimingFrameInfo {
int64_t decode_start_ms; // Decode start time.
int64_t decode_finish_ms; // Decode completion time.
int64_t render_time_ms; // Proposed render time to ensure smooth playback.
uint8_t flags; // Flags indicating validity and/or why tracing was triggered.
};
} // namespace webrtc
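
Because the flags form a bitmask, both trigger reasons can be set on
the same frame. A small sketch (hypothetical MakeFlags helper) of
composing the field from the enum above:

#include <cstdint>

#include "webrtc/api/video/video_timing.h"

// Builds the |flags| byte for a traced frame. Note that kDefault (0)
// doubles as "valid but untagged" for senders using the old protocol,
// while kInvalid (0xFF) means the timing data must be ignored.
uint8_t MakeFlags(bool by_timer, bool by_size) {
  uint8_t flags = webrtc::TimingFrameFlags::kDefault;
  if (by_timer)
    flags |= webrtc::TimingFrameFlags::kTriggeredByTimer;  // bit 0
  if (by_size)
    flags |= webrtc::TimingFrameFlags::kTriggeredBySize;  // bit 1
  return flags;
}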

View File

@ -66,7 +66,7 @@ class EncodedImage {
// Timing information should be updatable on const instances.
mutable struct Timing {
bool is_timing_frame = false;
uint8_t flags = TimingFrameFlags::kInvalid;
int64_t encode_start_ms = 0;
int64_t encode_finish_ms = 0;
int64_t packetization_finish_ms = 0;

View File

@ -49,7 +49,6 @@ EncodedImage::EncodedImage(uint8_t* buffer, size_t length, size_t size)
void EncodedImage::SetEncodeTime(int64_t encode_start_ms,
int64_t encode_finish_ms) const {
timing_.is_timing_frame = true;
timing_.encode_start_ms = encode_start_ms;
timing_.encode_finish_ms = encode_finish_ms;
}

View File

@ -248,18 +248,24 @@ bool VideoContentTypeExtension::Write(uint8_t* data,
// Video Timing.
// 6 timestamps in milliseconds counted from capture time stored in rtp header:
// encode start/finish, packetization complete, pacer exit and reserved for
// modification by the network.
// modification by the network. |flags| is a bitmask and has the
// following allowed values:
// 0 = Valid data, but no flags available (backwards compatibility)
// 1 = Frame marked as timing frame due to cyclic timer.
// 2 = Frame marked as timing frame due to size being outside limit.
// 255 = Invalid. The whole timing frame extension should be ignored.
//
// 0 1 2 3
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | ID | len=11| encode start ms delta | encode finish |
// | ID | len=12| flags | encode start ms delta |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | ms delta | packetizer finish ms delta | pacer exit |
// | encode finish ms delta | packetizer finish ms delta |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | ms delta | network timestamp ms delta | network2 time-|
// | pacer exit ms delta | network timestamp ms delta |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | stamp ms delta|
// +-+-+-+-+-+-+-+-+
// | network2 timestamp ms delta |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
constexpr RTPExtensionType VideoTimingExtension::kId;
constexpr uint8_t VideoTimingExtension::kValueSizeBytes;
@ -268,47 +274,62 @@ constexpr const char VideoTimingExtension::kUri[];
bool VideoTimingExtension::Parse(rtc::ArrayView<const uint8_t> data,
VideoSendTiming* timing) {
RTC_DCHECK(timing);
if (data.size() != kValueSizeBytes)
// TODO(sprang): Deprecate support for old wire format.
ptrdiff_t off = 0;
switch (data.size()) {
case kValueSizeBytes - 1:
timing->flags = 0;
off = 1; // Old wire format without the flags field.
break;
case kValueSizeBytes:
timing->flags = ByteReader<uint8_t>::ReadBigEndian(data.data());
break;
default:
return false;
timing->encode_start_delta_ms =
ByteReader<uint16_t>::ReadBigEndian(data.data());
}
timing->encode_start_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
data.data() + VideoSendTiming::kEncodeStartDeltaOffset - off);
timing->encode_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
data.data() + 2 * VideoSendTiming::kEncodeFinishDeltaIdx);
data.data() + VideoSendTiming::kEncodeFinishDeltaOffset - off);
timing->packetization_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
data.data() + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx);
data.data() + VideoSendTiming::kPacketizationFinishDeltaOffset - off);
timing->pacer_exit_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
data.data() + 2 * VideoSendTiming::kPacerExitDeltaIdx);
data.data() + VideoSendTiming::kPacerExitDeltaOffset - off);
timing->network_timstamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
data.data() + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx);
data.data() + VideoSendTiming::kNetworkTimestampDeltaOffset - off);
timing->network2_timstamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
data.data() + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx);
timing->is_timing_frame = true;
data.data() + VideoSendTiming::kNetwork2TimestampDeltaOffset - off);
return true;
}
bool VideoTimingExtension::Write(uint8_t* data, const VideoSendTiming& timing) {
ByteWriter<uint16_t>::WriteBigEndian(data, timing.encode_start_delta_ms);
ByteWriter<uint8_t>::WriteBigEndian(data + VideoSendTiming::kFlagsOffset,
timing.flags);
ByteWriter<uint16_t>::WriteBigEndian(
data + 2 * VideoSendTiming::kEncodeFinishDeltaIdx,
data + VideoSendTiming::kEncodeStartDeltaOffset,
timing.encode_start_delta_ms);
ByteWriter<uint16_t>::WriteBigEndian(
data + VideoSendTiming::kEncodeFinishDeltaOffset,
timing.encode_finish_delta_ms);
ByteWriter<uint16_t>::WriteBigEndian(
data + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx,
data + VideoSendTiming::kPacketizationFinishDeltaOffset,
timing.packetization_finish_delta_ms);
ByteWriter<uint16_t>::WriteBigEndian(
data + 2 * VideoSendTiming::kPacerExitDeltaIdx,
data + VideoSendTiming::kPacerExitDeltaOffset,
timing.pacer_exit_delta_ms);
ByteWriter<uint16_t>::WriteBigEndian(
data + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx, 0); // reserved
data + VideoSendTiming::kNetworkTimestampDeltaOffset, 0); // reserved
ByteWriter<uint16_t>::WriteBigEndian(
data + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx, 0); // reserved
data + VideoSendTiming::kNetwork2TimestampDeltaOffset, 0); // reserved
return true;
}
bool VideoTimingExtension::Write(uint8_t* data,
uint16_t time_delta_ms,
uint8_t idx) {
RTC_DCHECK_LT(idx, 6);
ByteWriter<uint16_t>::WriteBigEndian(data + 2 * idx, time_delta_ms);
uint8_t offset) {
RTC_DCHECK_LT(offset, kValueSizeBytes - sizeof(uint16_t));
ByteWriter<uint16_t>::WriteBigEndian(data + offset, time_delta_ms);
return true;
}
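
To make the new 13-byte layout concrete, a standalone sketch that
assembles the extension value by hand at the offsets defined in
VideoSendTiming, mirroring (not reusing) the ByteWriter logic; the
helper names are made up for illustration:

#include <cstdint>

// Big-endian 16-bit write, equivalent to ByteWriter<uint16_t>.
void WriteU16BE(uint8_t* p, uint16_t v) {
  p[0] = static_cast<uint8_t>(v >> 8);
  p[1] = static_cast<uint8_t>(v & 0xff);
}

// Fills the 13-byte value: one flags byte, then six 16-bit ms deltas.
void FillVideoTimingValue(uint8_t value[13],
                          uint8_t flags,
                          uint16_t encode_start_delta_ms,
                          uint16_t encode_finish_delta_ms,
                          uint16_t packetization_finish_delta_ms,
                          uint16_t pacer_exit_delta_ms) {
  value[0] = flags;                                      // kFlagsOffset
  WriteU16BE(value + 1, encode_start_delta_ms);          // kEncodeStartDeltaOffset
  WriteU16BE(value + 3, encode_finish_delta_ms);         // kEncodeFinishDeltaOffset
  WriteU16BE(value + 5, packetization_finish_delta_ms);  // kPacketizationFinishDeltaOffset
  WriteU16BE(value + 7, pacer_exit_delta_ms);            // kPacerExitDeltaOffset
  WriteU16BE(value + 9, 0);                              // reserved (network)
  WriteU16BE(value + 11, 0);                             // reserved (network2)
}

A legacy receiver sees a 12-byte value instead; as Parse() above shows,
the same deltas are then read one byte earlier and |flags| is implied
to be 0.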

View File

@ -130,7 +130,7 @@ class VideoContentTypeExtension {
class VideoTimingExtension {
public:
static constexpr RTPExtensionType kId = kRtpExtensionVideoTiming;
static constexpr uint8_t kValueSizeBytes = 12;
static constexpr uint8_t kValueSizeBytes = 13;
static constexpr const char kUri[] =
"http://www.webrtc.org/experiments/rtp-hdrext/video-timing";

View File

@ -33,25 +33,25 @@ class RtpPacketToSend : public rtp::Packet {
void set_packetization_finish_time_ms(int64_t time) {
SetExtension<VideoTimingExtension>(
VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
VideoSendTiming::kPacketizationFinishDeltaIdx);
VideoSendTiming::kPacketizationFinishDeltaOffset);
}
void set_pacer_exit_time_ms(int64_t time) {
SetExtension<VideoTimingExtension>(
VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
VideoSendTiming::kPacerExitDeltaIdx);
VideoSendTiming::kPacerExitDeltaOffset);
}
void set_network_time_ms(int64_t time) {
SetExtension<VideoTimingExtension>(
VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
VideoSendTiming::kNetworkTimestampDeltaIdx);
VideoSendTiming::kNetworkTimestampDeltaOffset);
}
void set_network2_time_ms(int64_t time) {
SetExtension<VideoTimingExtension>(
VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
VideoSendTiming::kNetwork2TimestampDeltaIdx);
VideoSendTiming::kNetwork2TimestampDeltaOffset);
}
private:

View File

@ -32,6 +32,7 @@ constexpr uint8_t kTransmissionOffsetExtensionId = 1;
constexpr uint8_t kAudioLevelExtensionId = 9;
constexpr uint8_t kRtpStreamIdExtensionId = 0xa;
constexpr uint8_t kRtpMidExtensionId = 0xb;
constexpr uint8_t kVideoTimingExtensionId = 0xc;
constexpr int32_t kTimeOffset = 0x56ce;
constexpr bool kVoiceActive = true;
constexpr uint8_t kAudioLevel = 0x5a;
@ -97,8 +98,19 @@ constexpr uint8_t kPacketWithInvalidExtension[] = {
(kTransmissionOffsetExtensionId << 4) | 6, // (6+1)-byte extension, but
'e', 'x', 't', // Transmission Offset
'd', 'a', 't', 'a', // expected to be 3-bytes.
'p', 'a', 'y', 'l', 'o', 'a', 'd'
};
'p', 'a', 'y', 'l', 'o', 'a', 'd'};
constexpr uint8_t kPacketWithLegacyTimingExtension[] = {
0x90, kPayloadType, kSeqNumFirstByte, kSeqNumSecondByte,
0x65, 0x43, 0x12, 0x78, // kTimestamp.
0x12, 0x34, 0x56, 0x78, // kSSrc.
0xbe, 0xde, 0x00, 0x04, // Extension block of size 4 x 32bit words.
(kVideoTimingExtensionId << 4)
| VideoTimingExtension::kValueSizeBytes - 2, // Old format without flags.
0x00, 0x01, 0x00,
0x02, 0x00, 0x03, 0x00,
0x04, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00};
// clang-format on
} // namespace
@ -498,4 +510,57 @@ TEST(RtpPacketTest, RawExtensionFunctionsAcceptZeroIdAndReturnFalse) {
EXPECT_THAT(packet.AllocateRawExtension(kInvalidId, 3), IsEmpty());
}
TEST(RtpPacketTest, CreateAndParseTimingFrameExtension) {
// Create a packet with video frame timing extension populated.
RtpPacketToSend::ExtensionManager send_extensions;
send_extensions.Register(kRtpExtensionVideoTiming, kVideoTimingExtensionId);
RtpPacketToSend send_packet(&send_extensions);
send_packet.SetPayloadType(kPayloadType);
send_packet.SetSequenceNumber(kSeqNum);
send_packet.SetTimestamp(kTimestamp);
send_packet.SetSsrc(kSsrc);
VideoSendTiming timing;
timing.encode_start_delta_ms = 1;
timing.encode_finish_delta_ms = 2;
timing.packetization_finish_delta_ms = 3;
timing.pacer_exit_delta_ms = 4;
timing.flags =
TimingFrameFlags::kTriggeredByTimer + TimingFrameFlags::kTriggeredBySize;
send_packet.SetExtension<VideoTimingExtension>(timing);
// Serialize the packet and then parse it again.
RtpPacketReceived::ExtensionManager extensions;
extensions.Register<VideoTimingExtension>(kVideoTimingExtensionId);
RtpPacketReceived receive_packet(&extensions);
EXPECT_TRUE(receive_packet.Parse(send_packet.Buffer()));
VideoSendTiming receivied_timing;
EXPECT_TRUE(
receive_packet.GetExtension<VideoTimingExtension>(&receivied_timing));
// Only check first and last timestamp (covered by other tests) plus flags.
EXPECT_EQ(receivied_timing.encode_start_delta_ms,
timing.encode_start_delta_ms);
EXPECT_EQ(receivied_timing.pacer_exit_delta_ms, timing.pacer_exit_delta_ms);
EXPECT_EQ(receivied_timing.flags, timing.flags);
}
TEST(RtpPacketTest, ParseLegacyTimingFrameExtension) {
// Parse the modified packet.
RtpPacketReceived::ExtensionManager extensions;
extensions.Register<VideoTimingExtension>(kVideoTimingExtensionId);
RtpPacketReceived packet(&extensions);
EXPECT_TRUE(packet.Parse(kPacketWithLegacyTimingExtension,
sizeof(kPacketWithLegacyTimingExtension)));
VideoSendTiming receivied_timing;
EXPECT_TRUE(packet.GetExtension<VideoTimingExtension>(&receivied_timing));
// Check first and last timestamp are still OK. Flags should now be 0.
EXPECT_EQ(receivied_timing.encode_start_delta_ms, 1);
EXPECT_EQ(receivied_timing.pacer_exit_delta_ms, 4);
EXPECT_EQ(receivied_timing.flags, 0);
}
} // namespace webrtc

View File

@ -91,7 +91,7 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
rtp_header->type = parsed_payload.type;
rtp_header->type.Video.rotation = kVideoRotation_0;
rtp_header->type.Video.content_type = VideoContentType::UNSPECIFIED;
rtp_header->type.Video.video_timing.is_timing_frame = false;
rtp_header->type.Video.video_timing.flags = TimingFrameFlags::kInvalid;
// Retrieve the video rotation information.
if (rtp_header->header.extension.hasVideoRotation) {
@ -107,7 +107,6 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
if (rtp_header->header.extension.has_video_timing) {
rtp_header->type.Video.video_timing =
rtp_header->header.extension.video_timing;
rtp_header->type.Video.video_timing.is_timing_frame = true;
}
rtp_header->type.Video.playout_delay =

View File

@ -11,6 +11,7 @@
#include <memory>
#include <vector>
#include "webrtc/api/video/video_timing.h"
#include "webrtc/logging/rtc_event_log/mock/mock_rtc_event_log.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h"
@ -994,7 +995,7 @@ TEST_P(RtpSenderTest, NoFlexfecForTimingFrames) {
0, 1500));
RTPVideoHeader video_header;
memset(&video_header, 0, sizeof(RTPVideoHeader));
video_header.video_timing.is_timing_frame = true;
video_header.video_timing.flags = TimingFrameFlags::kTriggeredByTimer;
EXPECT_TRUE(rtp_sender_->SendOutgoingData(
kVideoFrameKey, kPayloadType, kTimestamp, kCaptureTimeMs, kPayloadData,
sizeof(kPayloadData), nullptr, &video_header, nullptr));
@ -1019,7 +1020,7 @@ TEST_P(RtpSenderTest, NoFlexfecForTimingFrames) {
EXPECT_CALL(mock_paced_sender_,
InsertPacket(RtpPacketSender::kLowPriority, kMediaSsrc,
kSeqNum + 1, _, _, false));
video_header.video_timing.is_timing_frame = false;
video_header.video_timing.flags = TimingFrameFlags::kInvalid;
EXPECT_TRUE(rtp_sender_->SendOutgoingData(
kVideoFrameKey, kPayloadType, kTimestamp + 1, kCaptureTimeMs + 1,
kPayloadData, sizeof(kPayloadData), nullptr, &video_header, nullptr));
@ -1571,7 +1572,7 @@ TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) {
const int64_t kCaptureTimestamp = fake_clock_.TimeInMilliseconds();
RTPVideoHeader hdr = {0};
hdr.video_timing.is_timing_frame = true;
hdr.video_timing.flags = TimingFrameFlags::kTriggeredByTimer;
hdr.video_timing.encode_start_delta_ms = kEncodeStartDeltaMs;
hdr.video_timing.encode_finish_delta_ms = kEncodeFinishDeltaMs;

View File

@ -332,7 +332,7 @@ bool RTPSenderVideo::SendVideo(RtpVideoCodecTypes video_type,
last_packet->SetExtension<VideoContentTypeExtension>(
video_header->content_type);
}
if (video_header->video_timing.is_timing_frame) {
if (video_header->video_timing.flags != TimingFrameFlags::kInvalid) {
last_packet->SetExtension<VideoTimingExtension>(
video_header->video_timing);
}

View File

@ -370,7 +370,7 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
encoded_image_.content_type_ = (mode_ == kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded_image_.timing_.is_timing_frame = false;
encoded_image_.timing_.flags = TimingFrameFlags::kInvalid;
encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
// Split encoded image up into fragments. This also updates |encoded_image_|.

View File

@ -900,7 +900,7 @@ int VP8EncoderImpl::GetEncodedPartitions(
encoded_images_[encoder_idx].content_type_ =
(codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded_images_[encoder_idx].timing_.is_timing_frame = false;
encoded_images_[encoder_idx].timing_.flags = TimingFrameFlags::kInvalid;
int qp = -1;
vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);

View File

@ -710,7 +710,7 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
: VideoContentType::UNSPECIFIED;
encoded_image_._encodedHeight = raw_->d_h;
encoded_image_._encodedWidth = raw_->d_w;
encoded_image_.timing_.is_timing_frame = false;
encoded_image_.timing_.flags = TimingFrameFlags::kInvalid;
int qp = -1;
vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp);
encoded_image_.qp_ = qp;

View File

@ -88,7 +88,7 @@ void VCMEncodedFrame::Reset() {
_codec = kVideoCodecUnknown;
rotation_ = kVideoRotation_0;
content_type_ = VideoContentType::UNSPECIFIED;
timing_.is_timing_frame = false;
timing_.flags = TimingFrameFlags::kInvalid;
_rotation_set = false;
}

View File

@ -164,8 +164,7 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(
rotation_ = packet.video_header.rotation;
_rotation_set = true;
content_type_ = packet.video_header.content_type;
if (packet.video_header.video_timing.is_timing_frame) {
timing_.is_timing_frame = true;
if (packet.video_header.video_timing.flags != TimingFrameFlags::kInvalid) {
timing_.encode_start_ms =
ntp_time_ms_ + packet.video_header.video_timing.encode_start_delta_ms;
timing_.encode_finish_ms =
@ -182,9 +181,8 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(
timing_.network2_timestamp_ms =
ntp_time_ms_ +
packet.video_header.video_timing.network2_timstamp_delta_ms;
} else {
timing_.is_timing_frame = false;
}
timing_.flags = packet.video_header.video_timing.flags;
}
if (packet.is_first_packet_in_frame) {

View File

@ -32,6 +32,7 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
: packet_buffer_(packet_buffer),
first_seq_num_(first_seq_num),
last_seq_num_(last_seq_num),
timestamp_(0),
received_time_(received_time),
times_nacked_(times_nacked) {
VCMPacket* first_packet = packet_buffer_->GetPacket(first_seq_num);
@ -113,10 +114,10 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
rotation_ = last_packet->video_header.rotation;
_rotation_set = true;
content_type_ = last_packet->video_header.content_type;
if (last_packet->video_header.video_timing.is_timing_frame) {
if (last_packet->video_header.video_timing.flags !=
TimingFrameFlags::kInvalid) {
// ntp_time_ms_ may be -1 if not estimated yet. This is not a problem,
// as this will be dealt with at the time of reporting.
timing_.is_timing_frame = true;
timing_.encode_start_ms =
ntp_time_ms_ +
last_packet->video_header.video_timing.encode_start_delta_ms;
@ -138,9 +139,8 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
timing_.receive_start_ms = first_packet->receive_time_ms;
timing_.receive_finish_ms = last_packet->receive_time_ms;
} else {
timing_.is_timing_frame = false;
}
timing_.flags = last_packet->video_header.video_timing.flags;
}
RtpFrameObject::~RtpFrameObject() {

View File

@ -92,7 +92,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
frameInfo->renderTimeMs);
// Report timing information.
if (frameInfo->timing.is_timing_frame) {
if (frameInfo->timing.flags != TimingFrameFlags::kInvalid) {
int64_t capture_time_ms = decodedImage.ntp_time_ms() - ntp_offset_;
// Convert remote timestamps to local time from ntp timestamps.
frameInfo->timing.encode_start_ms -= ntp_offset_;
@ -137,6 +137,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
timing_frame_info.decode_finish_ms = now_ms;
timing_frame_info.render_time_ms = frameInfo->renderTimeMs;
timing_frame_info.rtp_timestamp = decodedImage.timestamp();
timing_frame_info.flags = frameInfo->timing.flags;
_timing->SetTimingFrameInfo(timing_frame_info);
}

View File

@ -231,7 +231,7 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
rtc::Optional<size_t> outlier_frame_size;
rtc::Optional<int64_t> encode_start_ms;
bool is_timing_frame = false;
uint8_t timing_flags = TimingFrameFlags::kInvalid;
{
rtc::CritScope crit(&timing_params_lock_);
@ -274,14 +274,16 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
if (last_timing_frame_time_ms_ == -1 ||
timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms ||
timing_frame_delay_ms == 0) {
is_timing_frame = true;
timing_flags = TimingFrameFlags::kTriggeredByTimer;
last_timing_frame_time_ms_ = encoded_image.capture_time_ms_;
}
// Outliers trigger timing frames, but do not affect scheduled timing
// frames.
if (outlier_frame_size && encoded_image._length >= *outlier_frame_size) {
is_timing_frame = true;
if (timing_flags == TimingFrameFlags::kInvalid)
timing_flags = 0;
timing_flags |= TimingFrameFlags::kTriggeredBySize;
}
}
@ -290,9 +292,10 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
// drift relative to rtc::TimeMillis(). We can't use it for Timing frames,
// because, in order to be sent over the network, the capture time is
// required to be less than all the other timestamps.
if (is_timing_frame && encode_start_ms) {
if (timing_flags != TimingFrameFlags::kInvalid && encode_start_ms) {
encoded_image.SetEncodeTime(*encode_start_ms, rtc::TimeMillis());
}
encoded_image.timing_.flags = timing_flags;
Result result = post_encode_callback_->OnEncodedImage(
encoded_image, codec_specific, fragmentation_header);
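
Distilled from the logic above, a hedged sketch of the flag
computation as a pure function (hypothetical signature; the real code
operates on member state under timing_params_lock_):

#include <cstdint>

#include "webrtc/api/video/video_timing.h"

// kInvalid means "not a timing frame"; otherwise trigger reasons are
// OR-ed into the bitmask, so a frame can be both timer- and
// size-triggered at once.
uint8_t ComputeTimingFlags(bool timer_due, bool frame_size_is_outlier) {
  uint8_t flags = webrtc::TimingFrameFlags::kInvalid;
  if (timer_due)
    flags = webrtc::TimingFrameFlags::kTriggeredByTimer;
  if (frame_size_is_outlier) {
    if (flags == webrtc::TimingFrameFlags::kInvalid)
      flags = webrtc::TimingFrameFlags::kDefault;  // Clear kInvalid first.
    flags |= webrtc::TimingFrameFlags::kTriggeredBySize;
  }
  return flags;
}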

View File

@ -31,7 +31,8 @@ class FakeEncodedImageCallback : public EncodedImageCallback {
Result OnEncodedImage(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) override {
last_frame_was_timing_ = encoded_image.timing_.is_timing_frame;
last_frame_was_timing_ =
encoded_image.timing_.flags != TimingFrameFlags::kInvalid;
return Result(Result::OK);
};

View File

@ -1162,7 +1162,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
(codec_mode_ == webrtc::VideoCodecMode::kScreensharing)
? webrtc::VideoContentType::SCREENSHARE
: webrtc::VideoContentType::UNSPECIFIED;
image->timing_.is_timing_frame = false;
image->timing_.flags = webrtc::TimingFrameFlags::kInvalid;
image->_frameType =
(key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
image->_completeFrame = true;

View File

@ -20,7 +20,7 @@
@synthesize timeStamp = _timeStamp;
@synthesize captureTimeMs = _captureTimeMs;
@synthesize ntpTimeMs = _ntpTimeMs;
@synthesize isTimingFrame = _isTimingFrame;
@synthesize flags = _flags;
@synthesize encodeStartMs = _encodeStartMs;
@synthesize encodeFinishMs = _encodeFinishMs;
@synthesize frameType = _frameType;
@ -40,7 +40,7 @@
_timeStamp = encodedImage._timeStamp;
_captureTimeMs = encodedImage.capture_time_ms_;
_ntpTimeMs = encodedImage.ntp_time_ms_;
_isTimingFrame = encodedImage.timing_.is_timing_frame;
_flags = encodedImage.timing_.flags;
_encodeStartMs = encodedImage.timing_.encode_start_ms;
_encodeFinishMs = encodedImage.timing_.encode_finish_ms;
_frameType = (RTCFrameType)encodedImage._frameType;
@ -64,7 +64,7 @@
encodedImage._timeStamp = _timeStamp;
encodedImage.capture_time_ms_ = _captureTimeMs;
encodedImage.ntp_time_ms_ = _ntpTimeMs;
encodedImage.timing_.is_timing_frame = _isTimingFrame;
encodedImage.timing_.flags = _flags;
encodedImage.timing_.encode_start_ms = _encodeStartMs;
encodedImage.timing_.encode_finish_ms = _encodeFinishMs;
encodedImage._frameType = webrtc::FrameType(_frameType);

View File

@ -657,7 +657,7 @@ CFStringRef ExtractProfile(const cricket::VideoCodec &codec) {
frame.rotation = rotation;
frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare :
RTCVideoContentTypeUnspecified;
frame.isTimingFrame = NO;
frame.flags = webrtc::TimingFrameFlags::kInvalid;
int qp;
_h264BitstreamParser.ParseBitstream(buffer->data(), buffer->size());

View File

@ -40,7 +40,7 @@ RTC_EXPORT
@property(nonatomic, assign) uint32_t timeStamp;
@property(nonatomic, assign) long captureTimeMs;
@property(nonatomic, assign) long ntpTimeMs;
@property(nonatomic, assign) BOOL isTimingFrame;
@property(nonatomic, assign) uint8_t flags;
@property(nonatomic, assign) long encodeStartMs;
@property(nonatomic, assign) long encodeFinishMs;
@property(nonatomic, assign) RTCFrameType frameType;

View File

@ -130,7 +130,7 @@ EncodedImageCallback::Result PayloadRouter::OnEncodedImage(
CopyCodecSpecific(codec_specific_info, &rtp_video_header);
rtp_video_header.rotation = encoded_image.rotation_;
rtp_video_header.content_type = encoded_image.content_type_;
if (encoded_image.timing_.is_timing_frame) {
if (encoded_image.timing_.flags != TimingFrameFlags::kInvalid) {
rtp_video_header.video_timing.encode_start_delta_ms =
VideoSendTiming::GetDeltaCappedMs(
encoded_image.capture_time_ms_,
@ -143,10 +143,8 @@ EncodedImageCallback::Result PayloadRouter::OnEncodedImage(
rtp_video_header.video_timing.pacer_exit_delta_ms = 0;
rtp_video_header.video_timing.network_timstamp_delta_ms = 0;
rtp_video_header.video_timing.network2_timstamp_delta_ms = 0;
rtp_video_header.video_timing.is_timing_frame = true;
} else {
rtp_video_header.video_timing.is_timing_frame = false;
}
rtp_video_header.video_timing.flags = encoded_image.timing_.flags;
rtp_video_header.playout_delay = encoded_image.playout_delay_;
int stream_index = rtp_video_header.simulcastIdx;

View File

@ -560,7 +560,6 @@ void RtpVideoStreamReceiver::NotifyReceiverOfFecPacket(
rtp_header.type.Video.video_timing = {0u, 0u, 0u, 0u, 0u, 0u, false};
if (header.extension.has_video_timing) {
rtp_header.type.Video.video_timing = header.extension.video_timing;
rtp_header.type.Video.video_timing.is_timing_frame = true;
}
rtp_header.type.Video.playout_delay = header.extension.playout_delay;