Add histogram for end-to-end delay:

"WebRTC.Video.EndToEndDelayInMs"

Make the capture time in the local timebase available for decoded VP9 video frames (propagate ntp_time_ms from the EncodedImage to the decoded VideoFrame).

BUG=webrtc:6409

Review-Url: https://codereview.webrtc.org/1905563002
Cr-Commit-Position: refs/heads/master@{#14367}
Author: asapersson
Committed: 2016-09-23 02:09:46 -07:00 (by Commit bot)
Commit: 1490f7aa55
Parent: 6d4c8c307e

6 changed files with 27 additions and 7 deletions
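
For reference, an editor's minimal self-contained sketch (not code from this commit) of the computation the diffs below add: per rendered frame, the end-to-end delay is the receiver's current NTP time minus the frame's capture time mapped into the receiver's NTP timebase, and the average is reported once enough samples exist. SimpleSampleCounter and AddEndToEndDelaySample are illustrative stand-ins, not WebRTC APIs.

#include <cstdint>

class SimpleSampleCounter {
 public:
  void Add(int sample) {
    sum_ += sample;
    ++num_samples_;
  }
  // Returns -1 until enough samples exist, mirroring the guard used by the
  // counters in ReceiveStatisticsProxy::UpdateHistograms().
  int Avg(int min_required_samples) const {
    if (num_samples_ < min_required_samples)
      return -1;
    return static_cast<int>(sum_ / num_samples_);
  }

 private:
  int64_t sum_ = 0;
  int num_samples_ = 0;
};

// now_ntp_ms: the receiver's current NTP time. frame_ntp_ms: the frame's
// capture time in the receiver's NTP timebase (<= 0 when unknown).
void AddEndToEndDelaySample(int64_t now_ntp_ms,
                            int64_t frame_ntp_ms,
                            SimpleSampleCounter* counter) {
  if (frame_ntp_ms > 0) {
    const int64_t delay_ms = now_ntp_ms - frame_ntp_ms;
    if (delay_ms >= 0)
      counter->Add(static_cast<int>(delay_ms));
  }
}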

webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc

@@ -931,14 +931,16 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image,
   // It may be released by libvpx during future vpx_codec_decode or
   // vpx_codec_destroy calls.
   img = vpx_codec_get_frame(decoder_, &iter);
-  int ret = ReturnFrame(img, input_image._timeStamp);
+  int ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_);
   if (ret != 0) {
     return ret;
   }
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
+int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
+                                uint32_t timestamp,
+                                int64_t ntp_time_ms) {
   if (img == NULL) {
     // Decoder OK and NULL image => No show frame.
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -964,6 +966,7 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
   VideoFrame decoded_image(img_wrapped_buffer, timestamp,
                            0 /* render_time_ms */, webrtc::kVideoRotation_0);
+  decoded_image.set_ntp_time_ms(ntp_time_ms);
   int ret = decode_complete_callback_->Decoded(decoded_image);
   if (ret != 0)
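
Reduced to its essentials, the decoder-side change above is a simple pattern: carry the capture time from the encoded input through to the decoded output so downstream consumers can compute delay. An editor's sketch with hypothetical types (EncodedInput and DecodedOutput are illustrative, not WebRTC classes):

#include <cstdint>

struct EncodedInput {
  uint32_t rtp_timestamp = 0;
  int64_t ntp_capture_time_ms = 0;
};

struct DecodedOutput {
  uint32_t rtp_timestamp = 0;
  int64_t ntp_capture_time_ms = 0;
};

DecodedOutput Decode(const EncodedInput& input) {
  DecodedOutput output;
  output.rtp_timestamp = input.rtp_timestamp;
  // The copy this commit adds for VP9: without it, downstream stats code
  // cannot compute end-to-end delay for decoded frames.
  output.ntp_capture_time_ms = input.ntp_capture_time_ms;
  return output;
}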

webrtc/modules/video_coding/codecs/vp9/vp9_impl.h

@@ -153,7 +153,9 @@ class VP9DecoderImpl : public VP9Decoder {
   const char* ImplementationName() const override;
 
  private:
-  int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
+  int ReturnFrame(const vpx_image_t* img,
+                  uint32_t timestamp,
+                  int64_t ntp_time_ms);
 
   // Memory pool used to share buffers between libvpx and webrtc.
   Vp9FrameBufferPool frame_buffer_pool_;

webrtc/video/end_to_end_tests.cc

@@ -2189,6 +2189,8 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
     (*receive_configs)[0].rtp.rtx[kFakeVideoSendPayloadType].payload_type =
         kSendRtxPayloadType;
   }
+  // RTT needed for RemoteNtpTimeEstimator for the receive stream.
+  (*receive_configs)[0].rtp.rtcp_xr.receiver_reference_time_report = true;
   encoder_config->content_type =
       screenshare_ ? VideoEncoderConfig::ContentType::kScreen
                    : VideoEncoderConfig::ContentType::kRealtimeVideo;
@@ -2287,6 +2289,7 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs"));
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs"));
+  EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs"));
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));
   EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EncodeTimeInMs"));
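
Why the test above enables receiver_reference_time_report: without RTCP XR round-trip-time measurements, RemoteNtpTimeEstimator cannot place the sender's clock on the receiver's timeline, so decoded frames would carry no usable ntp_time_ms and the new histogram would record no samples. An editor's sketch of the idea, as a simplified model rather than the actual RemoteNtpTimeEstimator implementation:

#include <cstdint>

// An RTCP Sender Report carries the sender's NTP time at send; subtracting
// half the round-trip time from its arrival time approximates the receiver's
// NTP time for that same instant, yielding a sender-to-receiver clock offset.
int64_t EstimateSenderToReceiverOffsetMs(int64_t sr_sender_ntp_ms,
                                         int64_t sr_arrival_receiver_ntp_ms,
                                         int64_t rtt_ms) {
  // The report left the sender roughly rtt/2 ms before arriving here.
  const int64_t sr_send_time_receiver_ntp_ms =
      sr_arrival_receiver_ntp_ms - rtt_ms / 2;
  return sr_send_time_receiver_ntp_ms - sr_sender_ntp_ms;
}

// A frame's receiver-timebase capture time is then its sender-side NTP
// capture time plus this offset; that is the value propagated as ntp_time_ms
// and subtracted from "now" in OnRenderedFrame() below.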

webrtc/video/receive_statistics_proxy.cc

@@ -94,11 +94,14 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
     RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs",
                                current_delay_ms);
   }
   int delay_ms = delay_counter_.Avg(kMinRequiredDecodeSamples);
   if (delay_ms != -1)
     RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
+  int e2e_delay_ms = e2e_delay_counter_.Avg(kMinRequiredSamples);
+  if (e2e_delay_ms != -1)
+    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs", e2e_delay_ms);
   StreamDataCounters rtp = stats_.rtp_stats;
   StreamDataCounters rtx;
   for (auto it : rtx_stats_)
@@ -248,7 +251,9 @@ void ReceiveStatisticsProxy::OnDecodedFrame() {
   stats_.decode_frame_rate = decode_fps_estimator_.Rate(now).value_or(0);
 }
 
-void ReceiveStatisticsProxy::OnRenderedFrame(int width, int height) {
+void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
+  int width = frame.width();
+  int height = frame.height();
   RTC_DCHECK_GT(width, 0);
   RTC_DCHECK_GT(height, 0);
   uint64_t now = clock_->TimeInMilliseconds();
@@ -262,6 +267,12 @@ void ReceiveStatisticsProxy::OnRenderedFrame(int width, int height) {
   render_height_counter_.Add(height);
   render_fps_tracker_.AddSamples(1);
   render_pixel_tracker_.AddSamples(sqrt(width * height));
+
+  if (frame.ntp_time_ms() > 0) {
+    int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
+    if (delay_ms >= 0)
+      e2e_delay_counter_.Add(delay_ms);
+  }
 }
 
 void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t sync_offset_ms) {

webrtc/video/receive_statistics_proxy.h

@@ -45,7 +45,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
   void OnDecodedFrame();
   void OnSyncOffsetUpdated(int64_t sync_offset_ms);
-  void OnRenderedFrame(int width, int height);
+  void OnRenderedFrame(const VideoFrame& frame);
   void OnIncomingPayloadType(int payload_type);
   void OnDecoderImplementationName(const char* implementation_name);
   void OnIncomingRate(unsigned int framerate, unsigned int bitrate_bps);
@@ -120,6 +120,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
   SampleCounter target_delay_counter_ GUARDED_BY(crit_);
   SampleCounter current_delay_counter_ GUARDED_BY(crit_);
   SampleCounter delay_counter_ GUARDED_BY(crit_);
+  SampleCounter e2e_delay_counter_ GUARDED_BY(crit_);
   ReportBlockStats report_block_stats_ GUARDED_BY(crit_);
   QpCounters qp_counters_;  // Only accessed on the decoding thread.
   std::map<uint32_t, StreamDataCounters> rtx_stats_ GUARDED_BY(crit_);

webrtc/video/video_receive_stream.cc

@@ -350,7 +350,7 @@ void VideoReceiveStream::OnFrame(const VideoFrame& video_frame) {
   config_.renderer->OnFrame(video_frame);
 
   // TODO(tommi): OnRenderFrame grabs a lock too.
-  stats_proxy_.OnRenderedFrame(video_frame.width(), video_frame.height());
+  stats_proxy_.OnRenderedFrame(video_frame);
 }
 
 // TODO(asapersson): Consider moving callback from video_encoder.h or