Clean up webrtc:: namespace from leaked TimingFrameFlags

Bug: webrtc:9351
Change-Id: Ifbc0a522bf13ab62a2e490b9f129eacfabe7796f
Reviewed-on: https://webrtc-review.googlesource.com/80961
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23520}
This commit is contained in:
Ilya Nikolaevskiy
2018-06-05 15:21:32 +02:00
committed by Commit Bot
parent 5352ad7928
commit b6c462d4e4
21 changed files with 43 additions and 33 deletions

View File

@ -28,7 +28,7 @@ TimingFrameInfo::TimingFrameInfo()
decode_start_ms(-1),
decode_finish_ms(-1),
render_time_ms(-1),
flags(TimingFrameFlags::kNotTriggered) {}
flags(VideoSendTiming::kNotTriggered) {}
int64_t TimingFrameInfo::EndToEndDelay() const {
return capture_time_ms >= 0 ? decode_finish_ms - capture_time_ms : -1;
@ -48,15 +48,15 @@ bool TimingFrameInfo::operator<=(const TimingFrameInfo& other) const {
}
bool TimingFrameInfo::IsOutlier() const {
return !IsInvalid() && (flags & TimingFrameFlags::kTriggeredBySize);
return !IsInvalid() && (flags & VideoSendTiming::kTriggeredBySize);
}
bool TimingFrameInfo::IsTimerTriggered() const {
return !IsInvalid() && (flags & TimingFrameFlags::kTriggeredByTimer);
return !IsInvalid() && (flags & VideoSendTiming::kTriggeredByTimer);
}
bool TimingFrameInfo::IsInvalid() const {
return flags == TimingFrameFlags::kInvalid;
return flags == VideoSendTiming::kInvalid;
}
std::string TimingFrameInfo::ToString() const {

View File

@ -21,6 +21,8 @@
namespace webrtc {
// Deprecated. Use VideoSendTiming::TimingFrameFlags instead.
// TODO(ilnik): remove after some time.
enum TimingFrameFlags : uint8_t {
kNotTriggered = 0, // Timing info valid, but not to be transmitted.
// Used on send-side only.
@ -32,6 +34,14 @@ enum TimingFrameFlags : uint8_t {
// Video timing timestamps in ms counted from capture_time_ms of a frame.
// This structure represents data sent in video-timing RTP header extension.
struct VideoSendTiming {
enum TimingFrameFlags : uint8_t {
kNotTriggered = 0, // Timing info valid, but not to be transmitted.
// Used on send-side only.
kTriggeredByTimer = 1 << 0, // Frame marked for tracing by periodic timer.
kTriggeredBySize = 1 << 1, // Frame marked for tracing due to size.
kInvalid = std::numeric_limits<uint8_t>::max() // Invalid, ignore!
};
// Offsets of the fields in the RTP header extension, counting from the first
// byte after the one-byte header.
static constexpr uint8_t kFlagsOffset = 0;

View File

@ -60,7 +60,7 @@ class EncodedImage {
PlayoutDelay playout_delay_ = {-1, -1};
struct Timing {
uint8_t flags = TimingFrameFlags::kInvalid;
uint8_t flags = VideoSendTiming::kInvalid;
int64_t encode_start_ms = 0;
int64_t encode_finish_ms = 0;
int64_t packetization_finish_ms = 0;

View File

@ -526,7 +526,7 @@ TEST(RtpPacketTest, CreateAndParseTimingFrameExtension) {
timing.packetization_finish_delta_ms = 3;
timing.pacer_exit_delta_ms = 4;
timing.flags =
TimingFrameFlags::kTriggeredByTimer + TimingFrameFlags::kTriggeredBySize;
VideoSendTiming::kTriggeredByTimer | VideoSendTiming::kTriggeredBySize;
send_packet.SetExtension<VideoTimingExtension>(timing);

View File

@ -85,7 +85,7 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
rtp_header->type = parsed_payload.type;
rtp_header->type.Video.rotation = kVideoRotation_0;
rtp_header->type.Video.content_type = VideoContentType::UNSPECIFIED;
rtp_header->type.Video.video_timing.flags = TimingFrameFlags::kInvalid;
rtp_header->type.Video.video_timing.flags = VideoSendTiming::kInvalid;
// Retrieve the video rotation information.
if (rtp_header->header.extension.hasVideoRotation) {

View File

@ -1103,7 +1103,7 @@ TEST_P(RtpSenderTest, NoFlexfecForTimingFrames) {
0, 1500));
RTPVideoHeader video_header;
memset(&video_header, 0, sizeof(RTPVideoHeader));
video_header.video_timing.flags = TimingFrameFlags::kTriggeredByTimer;
video_header.video_timing.flags = VideoSendTiming::kTriggeredByTimer;
EXPECT_TRUE(rtp_sender_->SendOutgoingData(
kVideoFrameKey, kPayloadType, kTimestamp, kCaptureTimeMs, kPayloadData,
sizeof(kPayloadData), nullptr, &video_header, nullptr,
@ -1129,7 +1129,7 @@ TEST_P(RtpSenderTest, NoFlexfecForTimingFrames) {
EXPECT_CALL(mock_paced_sender_,
InsertPacket(RtpPacketSender::kLowPriority, kMediaSsrc,
kSeqNum + 1, _, _, false));
video_header.video_timing.flags = TimingFrameFlags::kInvalid;
video_header.video_timing.flags = VideoSendTiming::kInvalid;
EXPECT_TRUE(rtp_sender_->SendOutgoingData(
kVideoFrameKey, kPayloadType, kTimestamp + 1, kCaptureTimeMs + 1,
kPayloadData, sizeof(kPayloadData), nullptr, &video_header, nullptr,
@ -1705,7 +1705,7 @@ TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) {
const int64_t kCaptureTimestamp = fake_clock_.TimeInMilliseconds();
RTPVideoHeader hdr = {0};
hdr.video_timing.flags = TimingFrameFlags::kTriggeredByTimer;
hdr.video_timing.flags = VideoSendTiming::kTriggeredByTimer;
hdr.video_timing.encode_start_delta_ms = kEncodeStartDeltaMs;
hdr.video_timing.encode_finish_delta_ms = kEncodeFinishDeltaMs;

View File

@ -335,7 +335,7 @@ bool RTPSenderVideo::SendVideo(enum VideoCodecType video_type,
last_packet->SetExtension<VideoContentTypeExtension>(
video_header->content_type);
}
if (video_header->video_timing.flags != TimingFrameFlags::kInvalid) {
if (video_header->video_timing.flags != VideoSendTiming::kInvalid) {
last_packet->SetExtension<VideoTimingExtension>(
video_header->video_timing);
}

View File

@ -376,7 +376,7 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
encoded_image_.content_type_ = (mode_ == kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded_image_.timing_.flags = TimingFrameFlags::kInvalid;
encoded_image_.timing_.flags = VideoSendTiming::kInvalid;
encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
// Split encoded image up into fragments. This also updates |encoded_image_|.

View File

@ -927,7 +927,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(
encoded_images_[encoder_idx].content_type_ =
(codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded_images_[encoder_idx].timing_.flags = TimingFrameFlags::kInvalid;
encoded_images_[encoder_idx].timing_.flags = VideoSendTiming::kInvalid;
int qp = -1;
vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);

View File

@ -888,7 +888,7 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
pkt->data.frame.height[layer_id.spatial_layer_id];
encoded_image_._encodedWidth =
pkt->data.frame.width[layer_id.spatial_layer_id];
encoded_image_.timing_.flags = TimingFrameFlags::kInvalid;
encoded_image_.timing_.flags = VideoSendTiming::kInvalid;
int qp = -1;
vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp);
encoded_image_.qp_ = qp;

View File

@ -85,7 +85,7 @@ void VCMEncodedFrame::Reset() {
_codec = kVideoCodecUnknown;
rotation_ = kVideoRotation_0;
content_type_ = VideoContentType::UNSPECIFIED;
timing_.flags = TimingFrameFlags::kInvalid;
timing_.flags = VideoSendTiming::kInvalid;
_rotation_set = false;
}

View File

@ -164,7 +164,7 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(
rotation_ = packet.video_header.rotation;
_rotation_set = true;
content_type_ = packet.video_header.content_type;
if (packet.video_header.video_timing.flags != TimingFrameFlags::kInvalid) {
if (packet.video_header.video_timing.flags != VideoSendTiming::kInvalid) {
timing_.encode_start_ms =
ntp_time_ms_ + packet.video_header.video_timing.encode_start_delta_ms;
timing_.encode_finish_ms =

View File

@ -84,7 +84,7 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
_rotation_set = true;
content_type_ = last_packet->video_header.content_type;
if (last_packet->video_header.video_timing.flags !=
TimingFrameFlags::kInvalid) {
VideoSendTiming::kInvalid) {
// ntp_time_ms_ may be -1 if not estimated yet. This is not a problem,
// as this will be dealt with at the time of reporting.
timing_.encode_start_ms =

View File

@ -91,7 +91,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
frameInfo->renderTimeMs);
// Report timing information.
if (frameInfo->timing.flags != TimingFrameFlags::kInvalid) {
if (frameInfo->timing.flags != VideoSendTiming::kInvalid) {
int64_t capture_time_ms = decodedImage.ntp_time_ms() - ntp_offset_;
// Convert remote timestamps to local time from ntp timestamps.
frameInfo->timing.encode_start_ms -= ntp_offset_;

View File

@ -310,7 +310,7 @@ void VCMEncodedFrameCallback::FillTimingInfo(size_t simulcast_svc_idx,
EncodedImage* encoded_image) {
rtc::Optional<size_t> outlier_frame_size;
rtc::Optional<int64_t> encode_start_ms;
uint8_t timing_flags = TimingFrameFlags::kNotTriggered;
uint8_t timing_flags = VideoSendTiming::kNotTriggered;
{
rtc::CritScope crit(&timing_params_lock_);
@ -336,7 +336,7 @@ void VCMEncodedFrameCallback::FillTimingInfo(size_t simulcast_svc_idx,
// Outliers trigger timing frames, but do not affect scheduled timing
// frames.
if (outlier_frame_size && encoded_image->_length >= *outlier_frame_size) {
timing_flags |= TimingFrameFlags::kTriggeredBySize;
timing_flags |= VideoSendTiming::kTriggeredBySize;
}
// Check if it's time to send a timing frame.
@ -348,7 +348,7 @@ void VCMEncodedFrameCallback::FillTimingInfo(size_t simulcast_svc_idx,
if (last_timing_frame_time_ms_ == -1 ||
timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms ||
timing_frame_delay_ms == 0) {
timing_flags = TimingFrameFlags::kTriggeredByTimer;
timing_flags |= VideoSendTiming::kTriggeredByTimer;
last_timing_frame_time_ms_ = encoded_image->capture_time_ms_;
}
} // rtc::CritScope crit(&timing_params_lock_);
@ -377,7 +377,7 @@ void VCMEncodedFrameCallback::FillTimingInfo(size_t simulcast_svc_idx,
encoded_image->SetEncodeTime(*encode_start_ms, now_ms);
encoded_image->timing_.flags = timing_flags;
} else {
encoded_image->timing_.flags = TimingFrameFlags::kInvalid;
encoded_image->timing_.flags = VideoSendTiming::kInvalid;
}
}

View File

@ -36,8 +36,8 @@ class FakeEncodedImageCallback : public EncodedImageCallback {
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) override {
last_frame_was_timing_ =
encoded_image.timing_.flags != TimingFrameFlags::kInvalid &&
encoded_image.timing_.flags != TimingFrameFlags::kNotTriggered;
encoded_image.timing_.flags != VideoSendTiming::kInvalid &&
encoded_image.timing_.flags != VideoSendTiming::kNotTriggered;
last_capture_timestamp_ = encoded_image.capture_time_ms_;
return Result(Result::OK);
};

View File

@ -1004,7 +1004,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
image->content_type_ = (codec_mode_ == VideoCodecMode::kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
image->timing_.flags = TimingFrameFlags::kInvalid;
image->timing_.flags = VideoSendTiming::kInvalid;
image->_frameType = (key_frame ? kVideoFrameKey : kVideoFrameDelta);
image->_completeFrame = true;
CodecSpecificInfo info;

View File

@ -739,7 +739,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
frame.rotation = rotation;
frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare :
RTCVideoContentTypeUnspecified;
frame.flags = webrtc::TimingFrameFlags::kInvalid;
frame.flags = webrtc::VideoSendTiming::kInvalid;
int qp;
_h264BitstreamParser.ParseBitstream(buffer->data(), buffer->size());

View File

@ -220,8 +220,8 @@ EncodedImageCallback::Result PayloadRouter::OnEncodedImage(
CopyCodecSpecific(codec_specific_info, &rtp_video_header);
rtp_video_header.rotation = encoded_image.rotation_;
rtp_video_header.content_type = encoded_image.content_type_;
if (encoded_image.timing_.flags != TimingFrameFlags::kInvalid &&
encoded_image.timing_.flags != TimingFrameFlags::kNotTriggered) {
if (encoded_image.timing_.flags != VideoSendTiming::kInvalid &&
encoded_image.timing_.flags != VideoSendTiming::kNotTriggered) {
rtp_video_header.video_timing.encode_start_delta_ms =
VideoSendTiming::GetDeltaCappedMs(
encoded_image.capture_time_ms_,
@ -236,7 +236,7 @@ EncodedImageCallback::Result PayloadRouter::OnEncodedImage(
rtp_video_header.video_timing.network2_timestamp_delta_ms = 0;
rtp_video_header.video_timing.flags = encoded_image.timing_.flags;
} else {
rtp_video_header.video_timing.flags = TimingFrameFlags::kInvalid;
rtp_video_header.video_timing.flags = VideoSendTiming::kInvalid;
}
rtp_video_header.playout_delay = encoded_image.playout_delay_;

View File

@ -918,7 +918,7 @@ void SendStatisticsProxy::OnSendEncodedImage(
// If any of the simulcast streams have a huge frame, it should be counted
// as a single difficult input frame.
// https://w3c.github.io/webrtc-stats/#dom-rtcvideosenderstats-hugeframessent
if (encoded_image.timing_.flags & TimingFrameFlags::kTriggeredBySize) {
if (encoded_image.timing_.flags & VideoSendTiming::kTriggeredBySize) {
if (!last_outlier_timestamp_ ||
*last_outlier_timestamp_ < encoded_image.capture_time_ms_) {
last_outlier_timestamp_.emplace(encoded_image.capture_time_ms_);

View File

@ -901,7 +901,7 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
encoded_image.capture_time_ms_ * rtc::kNumMicrosecsPerMillisec;
rtc::Optional<int> encode_duration_us;
if (encoded_image.timing_.flags != TimingFrameFlags::kInvalid) {
if (encoded_image.timing_.flags != VideoSendTiming::kInvalid) {
encode_duration_us.emplace(
// TODO(nisse): Maybe use capture_time_ms_ rather than encode_start_ms_?
rtc::kNumMicrosecsPerMillisec *