New statistic added to VideoReceiveStream to determine latency to first decode.

This change adds a new field to the VideoReceiveStream::Stats structure
that measures the latency between the first frame being received and
the first frame being decoded in WebRTC. The goal is to compare this
latency with and without a FrameEncryptor attached.
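
A usage sketch (not part of this change): the field can be read from the
stats snapshot returned by VideoReceiveStream::GetStats(). The
`receive_stream` pointer below is an assumed, already-configured stream,
the usual WebRTC logging header is assumed to be included, and the value
stays at -1 until the first frame has been decoded.

  // Sketch: reading the new stat from an existing stream.
  // `receive_stream` is an assumed webrtc::VideoReceiveStream*.
  webrtc::VideoReceiveStream::Stats stats = receive_stream->GetStats();
  if (stats.first_frame_received_to_decoded_ms >= 0) {
    RTC_LOG(LS_INFO) << "First frame received-to-decoded latency: "
                     << stats.first_frame_received_to_decoded_ms << " ms";
  }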

Change-Id: I0f0178aff73b66f25dbc6617098033e226da2958
Bug: webrtc:10105
Reviewed-on: https://webrtc-review.googlesource.com/c/113328
Commit-Queue: Benjamin Wright <benwright@webrtc.org>
Reviewed-by: Steve Anton <steveanton@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#25956}
Benjamin Wright authored 2018-12-10 09:55:17 -08:00; committed by Commit Bot
parent 60aaa03ee1, commit 514f084c26
9 changed files with 30 additions and 3 deletions
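
The measurement itself (added to ReceiveStatisticsProxy, see the hunks
below) reduces to latching two timestamps and subtracting once both are
known: the receive-finish time of the first frame and the time at which
the first frame was decoded. A minimal standalone sketch of that
bookkeeping follows; the struct and method names are illustrative
assumptions, not the actual WebRTC interfaces.

  // Standalone sketch of the first-frame latency bookkeeping; names are
  // illustrative, not the real ReceiveStatisticsProxy interface.
  #include <cstdint>
  #include "absl/types/optional.h"

  struct FirstFrameLatencyTracker {
    absl::optional<int64_t> first_frame_received_time_ms;
    absl::optional<int64_t> first_decoded_frame_time_ms;
    int64_t first_frame_received_to_decoded_ms = -1;  // -1 until measured.

    // Record when the first frame finished being received.
    void OnFrameReceived(int64_t receive_finish_ms) {
      if (!first_frame_received_time_ms.has_value())
        first_frame_received_time_ms = receive_finish_ms;
      MaybeCompute();
    }

    // Record when the first frame was decoded.
    void OnFrameDecoded(int64_t decode_time_ms) {
      if (!first_decoded_frame_time_ms.has_value())
        first_decoded_frame_time_ms = decode_time_ms;
      MaybeCompute();
    }

    // Fill the stat exactly once, as soon as both timestamps are known.
    void MaybeCompute() {
      if (first_frame_received_to_decoded_ms == -1 &&
          first_frame_received_time_ms && first_decoded_frame_time_ms) {
        first_frame_received_to_decoded_ms =
            *first_decoded_frame_time_ms - *first_frame_received_time_ms;
      }
    }
  };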

@@ -516,6 +516,8 @@ const char* StatsReport::Value::display_name() const {
       return "googFirsReceived";
     case kStatsValueNameFirsSent:
       return "googFirsSent";
+    case kStatsValueNameFirstFrameReceivedToDecodedMs:
+      return "googFirstFrameReceivedToDecodedMs";
     case kStatsValueNameFrameHeightInput:
       return "googFrameHeightInput";
     case kStatsValueNameFrameHeightReceived:

@@ -176,6 +176,7 @@ class StatsReport {
     kStatsValueNameFingerprintAlgorithm,
     kStatsValueNameFirsReceived,
     kStatsValueNameFirsSent,
+    kStatsValueNameFirstFrameReceivedToDecodedMs,
     kStatsValueNameFrameHeightInput,
     kStatsValueNameFrameHeightReceived,
     kStatsValueNameFrameHeightSent,

@@ -35,7 +35,7 @@ VideoReceiveStream::Stats::Stats() = default;
 VideoReceiveStream::Stats::~Stats() = default;
 
 std::string VideoReceiveStream::Stats::ToString(int64_t time_ms) const {
-  char buf[1024];
+  char buf[2048];
   rtc::SimpleStringBuilder ss(buf);
   ss << "VideoReceiveStream stats: " << time_ms << ", {ssrc: " << ssrc << ", ";
   ss << "total_bps: " << total_bitrate_bps << ", ";
@@ -48,6 +48,8 @@ std::string VideoReceiveStream::Stats::ToString(int64_t time_ms) const {
   ss << "render_fps: " << render_frame_rate << ", ";
   ss << "decode_ms: " << decode_ms << ", ";
   ss << "max_decode_ms: " << max_decode_ms << ", ";
+  ss << "first_frame_received_to_decoded_ms: "
+     << first_frame_received_to_decoded_ms << ", ";
   ss << "cur_delay_ms: " << current_delay_ms << ", ";
   ss << "targ_delay_ms: " << target_delay_ms << ", ";
   ss << "jb_delay_ms: " << jitter_buffer_ms << ", ";

@@ -78,6 +78,7 @@ class VideoReceiveStream {
     int render_delay_ms = 10;
     int64_t interframe_delay_max_ms = -1;
     uint32_t frames_decoded = 0;
+    int64_t first_frame_received_to_decoded_ms = -1;
     absl::optional<uint64_t> qp_sum;
     int current_payload_type = -1;

@@ -558,6 +558,9 @@ struct VideoReceiverInfo : public MediaReceiverInfo {
   // Estimated capture start time in NTP time in ms.
   int64_t capture_start_ntp_time_ms = -1;
 
+  // First frame received to first frame decoded latency.
+  int64_t first_frame_received_to_decoded_ms = -1;
+
   // Timing frame info: all important timestamps for a full lifetime of a
   // single 'timing frame'.
   absl::optional<webrtc::TimingFrameInfo> timing_frame_info;

@@ -2532,7 +2532,8 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo(
   info.frames_decoded = stats.frames_decoded;
   info.frames_rendered = stats.frames_rendered;
   info.qp_sum = stats.qp_sum;
+  info.first_frame_received_to_decoded_ms =
+      stats.first_frame_received_to_decoded_ms;
   info.interframe_delay_max_ms = stats.interframe_delay_max_ms;
   info.content_type = stats.content_type;

@@ -252,6 +252,10 @@ void ExtractStats(const cricket::VideoReceiverInfo& info, StatsReport* report) {
     report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
                      info.capture_start_ntp_time_ms);
   }
+  if (info.first_frame_received_to_decoded_ms >= 0) {
+    report->AddInt64(StatsReport::kStatsValueNameFirstFrameReceivedToDecodedMs,
+                     info.first_frame_received_to_decoded_ms);
+  }
   if (info.qp_sum)
     report->AddInt64(StatsReport::kStatsValueNameQpSum, *info.qp_sum);

@@ -642,6 +642,17 @@ void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated(
     int64_t now_ms = clock_->TimeInMilliseconds();
     timing_frame_info_counter_.Add(info, now_ms);
   }
+
+  // Measure initial decoding latency between the first frame arriving and the
+  // first frame being decoded.
+  if (!first_frame_received_time_ms_.has_value()) {
+    first_frame_received_time_ms_ = info.receive_finish_ms;
+  }
+  if (stats_.first_frame_received_to_decoded_ms == -1 &&
+      first_decoded_frame_time_ms_) {
+    stats_.first_frame_received_to_decoded_ms =
+        *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_;
+  }
 }
 
 void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated(
@@ -749,8 +760,9 @@ void ReceiveStatisticsProxy::OnDecodedFrame(absl::optional<uint8_t> qp,
         interframe_delay_ms);
     content_specific_stats->flow_duration_ms += interframe_delay_ms;
   }
-  if (stats_.frames_decoded == 1)
+  if (stats_.frames_decoded == 1) {
     first_decoded_frame_time_ms_.emplace(now);
+  }
   last_decoded_frame_time_ms_.emplace(now);
 }

@@ -181,6 +181,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
   mutable std::map<int64_t, size_t> frame_window_ RTC_GUARDED_BY(&crit_);
   VideoContentType last_content_type_ RTC_GUARDED_BY(&crit_);
   VideoCodecType last_codec_type_ RTC_GUARDED_BY(&crit_);
+  absl::optional<int64_t> first_frame_received_time_ms_ RTC_GUARDED_BY(&crit_);
   absl::optional<int64_t> first_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_);
   absl::optional<int64_t> last_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_);
   size_t num_delayed_frames_rendered_ RTC_GUARDED_BY(&crit_);