Report timing frames info in GetStats.

Some frames are already marked as 'timing frames' via the video-timing RTP header extension, and timestamps along the full WebRTC pipeline are gathered for them. This CL implements reporting of these timestamps for a single timing frame since the last GetStats() call: the frame with the longest end-to-end delay between two consecutive GetStats calls is reported.

The purpose of this timing information is not to provide real-time statistics but to provide debugging information: it helps identify problematic places in the video pipeline for outliers (frames that took the longest to process).
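
A minimal sketch of that selection policy, assuming a reset-on-read tracker. The names TimingFrameInfoSketch, TimingFrameTracker and TakeTimingFrameInfo are illustrative, not the actual WebRTC API; in the real code the per-frame timestamps are forwarded through VCMReceiveStatisticsCallback::OnTimingFrameInfoUpdated, as the diffs below show, and the actual stats code may select the frame differently.

#include <cstdint>
#include <mutex>
#include <optional>

// Simplified stand-in for webrtc::TimingFrameInfo: only the two timestamps
// needed to compute the end-to-end delay are kept here.
struct TimingFrameInfoSketch {
  int64_t capture_time_ms = 0;   // remote capture time, mapped to the local clock
  int64_t decode_finish_ms = 0;  // local time when decoding finished

  int64_t EndToEndDelayMs() const { return decode_finish_ms - capture_time_ms; }
};

class TimingFrameTracker {
 public:
  // Called for every decoded timing frame; keeps the worst frame seen so far.
  void OnTimingFrameInfoUpdated(const TimingFrameInfoSketch& info) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!longest_ || info.EndToEndDelayMs() > longest_->EndToEndDelayMs())
      longest_ = info;
  }

  // Called from GetStats(): report the worst frame of this interval and reset,
  // so the next interval describes its own worst-case outlier.
  std::optional<TimingFrameInfoSketch> TakeTimingFrameInfo() {
    std::lock_guard<std::mutex> lock(mutex_);
    std::optional<TimingFrameInfoSketch> result = longest_;
    longest_.reset();
    return result;
  }

 private:
  std::mutex mutex_;
  std::optional<TimingFrameInfoSketch> longest_;
};

Resetting on read keeps each GetStats interval independent, so one slow frame does not dominate every subsequent report.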

BUG=webrtc:7594

Review-Url: https://codereview.webrtc.org/2946413002
Cr-Commit-Position: refs/heads/master@{#18909}
Author: ilnik
Date: 2017-07-06 03:06:50 -07:00
Committed by: Commit Bot
Parent: 5b7fc8ce42
Commit: 2edc6845ac
34 changed files with 376 additions and 67 deletions


@@ -149,6 +149,7 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
}
UpdateJitterDelay();
UpdateTimingFrameInfo();
PropagateDecodability(next_frame_it_->second);
// Sanity check for RTP timestamp monotonicity.
@@ -534,8 +535,15 @@ void FrameBuffer::UpdateJitterDelay() {
}
}
void FrameBuffer::UpdateTimingFrameInfo() {
TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo");
rtc::Optional<TimingFrameInfo> info = timing_->GetTimingFrameInfo();
if (info)
stats_callback_->OnTimingFrameInfoUpdated(*info);
}
void FrameBuffer::ClearFramesAndHistory() {
TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
frames_.clear();
last_decoded_frame_it_ = frames_.end();
last_continuous_frame_it_ = frames_.end();


@@ -153,6 +153,8 @@ class FrameBuffer {
void UpdateJitterDelay() EXCLUSIVE_LOCKS_REQUIRED(crit_);
void UpdateTimingFrameInfo() EXCLUSIVE_LOCKS_REQUIRED(crit_);
void ClearFramesAndHistory() EXCLUSIVE_LOCKS_REQUIRED(crit_);
bool HasBadRenderTiming(const FrameObject& frame, int64_t now_ms)


@@ -116,6 +116,7 @@ class VCMReceiveStatisticsCallbackMock : public VCMReceiveStatisticsCallback {
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms));
MOCK_METHOD1(OnTimingFrameInfoUpdated, void(const TimingFrameInfo& info));
};
class TestFrameBuffer2 : public ::testing::Test {


@@ -10,6 +10,8 @@
#include "webrtc/modules/video_coding/generic_decoder.h"
#include <algorithm>
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/timeutils.h"
@@ -91,6 +93,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
// Report timing information.
if (frameInfo->timing.is_timing_frame) {
int64_t capture_time_ms = decodedImage.ntp_time_ms() - ntp_offset_;
// Convert remote timestamps to local time from ntp timestamps.
frameInfo->timing.encode_start_ms -= ntp_offset_;
frameInfo->timing.encode_finish_ms -= ntp_offset_;
@@ -98,19 +101,44 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
frameInfo->timing.pacer_exit_ms -= ntp_offset_;
frameInfo->timing.network_timestamp_ms -= ntp_offset_;
frameInfo->timing.network2_timestamp_ms -= ntp_offset_;
// TODO(ilnik): Report timing information here.
// Capture time: decodedImage.ntp_time_ms() - ntp_offset
// Encode start: frameInfo->timing.encode_start_ms
// Encode finish: frameInfo->timing.encode_finish_ms
// Packetization done: frameInfo->timing.packetization_finish_ms
// Pacer exit: frameInfo->timing.pacer_exit_ms
// Network timestamp: frameInfo->timing.network_timestamp_ms
// Network2 timestamp: frameInfo->timing.network2_timestamp_ms
// Receive start: frameInfo->timing.receive_start_ms
// Receive finish: frameInfo->timing.receive_finish_ms
// Decode start: frameInfo->decodeStartTimeMs
// Decode finish: now_ms
// Render time: frameInfo->renderTimeMs
int64_t sender_delta_ms = 0;
if (decodedImage.ntp_time_ms() < 0) {
// Sender clock is not estimated yet. Make sure that sender times are all
// negative to indicate that. Yet they still should be relatively correct.
sender_delta_ms =
std::max({capture_time_ms, frameInfo->timing.encode_start_ms,
frameInfo->timing.encode_finish_ms,
frameInfo->timing.packetization_finish_ms,
frameInfo->timing.pacer_exit_ms,
frameInfo->timing.network_timestamp_ms,
frameInfo->timing.network2_timestamp_ms}) +
1;
}
TimingFrameInfo timing_frame_info;
timing_frame_info.capture_time_ms = capture_time_ms - sender_delta_ms;
timing_frame_info.encode_start_ms =
frameInfo->timing.encode_start_ms - sender_delta_ms;
timing_frame_info.encode_finish_ms =
frameInfo->timing.encode_finish_ms - sender_delta_ms;
timing_frame_info.packetization_finish_ms =
frameInfo->timing.packetization_finish_ms - sender_delta_ms;
timing_frame_info.pacer_exit_ms =
frameInfo->timing.pacer_exit_ms - sender_delta_ms;
timing_frame_info.network_timestamp_ms =
frameInfo->timing.network_timestamp_ms - sender_delta_ms;
timing_frame_info.network2_timestamp_ms =
frameInfo->timing.network2_timestamp_ms - sender_delta_ms;
timing_frame_info.receive_start_ms = frameInfo->timing.receive_start_ms;
timing_frame_info.receive_finish_ms = frameInfo->timing.receive_finish_ms;
timing_frame_info.decode_start_ms = frameInfo->decodeStartTimeMs;
timing_frame_info.decode_finish_ms = now_ms;
timing_frame_info.render_time_ms = frameInfo->renderTimeMs;
timing_frame_info.rtp_timestamp = decodedImage.timestamp();
_timing->SetTimingFrameInfo(timing_frame_info);
}
decodedImage.set_timestamp_us(


@@ -109,6 +109,8 @@ class VCMReceiveStatisticsCallback {
int min_playout_delay_ms,
int render_delay_ms) = 0;
virtual void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) = 0;
protected:
virtual ~VCMReceiveStatisticsCallback() {}
};


@@ -21,21 +21,22 @@
namespace webrtc {
VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing)
-: clock_(clock),
-master_(false),
-ts_extrapolator_(),
-codec_timer_(new VCMCodecTimer()),
-render_delay_ms_(kDefaultRenderDelayMs),
-min_playout_delay_ms_(0),
-max_playout_delay_ms_(10000),
-jitter_delay_ms_(0),
-current_delay_ms_(0),
-last_decode_ms_(0),
-prev_frame_timestamp_(0),
-num_decoded_frames_(0),
-num_delayed_decoded_frames_(0),
-first_decoded_frame_ms_(-1),
-sum_missed_render_deadline_ms_(0) {
+: clock_(clock),
+master_(false),
+ts_extrapolator_(),
+codec_timer_(new VCMCodecTimer()),
+render_delay_ms_(kDefaultRenderDelayMs),
+min_playout_delay_ms_(0),
+max_playout_delay_ms_(10000),
+jitter_delay_ms_(0),
+current_delay_ms_(0),
+last_decode_ms_(0),
+prev_frame_timestamp_(0),
+timing_frame_info_(),
+num_decoded_frames_(0),
+num_delayed_decoded_frames_(0),
+first_decoded_frame_ms_(-1),
+sum_missed_render_deadline_ms_(0) {
if (master_timing == NULL) {
master_ = true;
ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds());
@@ -304,4 +305,14 @@ bool VCMTiming::GetTimings(int* decode_ms,
return (num_decoded_frames_ > 0);
}
void VCMTiming::SetTimingFrameInfo(const TimingFrameInfo& info) {
rtc::CritScope cs(&crit_sect_);
timing_frame_info_.emplace(info);
}
rtc::Optional<TimingFrameInfo> VCMTiming::GetTimingFrameInfo() {
rtc::CritScope cs(&crit_sect_);
return timing_frame_info_;
}
} // namespace webrtc


@@ -102,6 +102,9 @@ class VCMTiming {
int* min_playout_delay_ms,
int* render_delay_ms) const;
void SetTimingFrameInfo(const TimingFrameInfo& info);
rtc::Optional<TimingFrameInfo> GetTimingFrameInfo();
enum { kDefaultRenderDelayMs = 10 };
enum { kDelayMaxChangeMsPerS = 100 };
@@ -131,6 +134,7 @@ class VCMTiming {
int current_delay_ms_ GUARDED_BY(crit_sect_);
int last_decode_ms_ GUARDED_BY(crit_sect_);
uint32_t prev_frame_timestamp_ GUARDED_BY(crit_sect_);
rtc::Optional<TimingFrameInfo> timing_frame_info_ GUARDED_BY(crit_sect_);
// Statistics.
size_t num_decoded_frames_ GUARDED_BY(crit_sect_);