Revert of Update histogram "WebRTC.Video.OnewayDelayInMs" to use the estimated one-way delay. (patchset #4 id:60001 of https://codereview.webrtc.org/1688143003/ )
Reason for revert: The delay stats are high.

Original issue's description:
> Update histogram "WebRTC.Video.OnewayDelayInMs" to use the estimated one-way delay.
> Previous logged delay was: network delay (rtt/2) + jitter delay + decode time + render delay.
>
> Make capture time in local timebase available for decoded VP9 video frames (propagate ntp_time_ms from EncodedImage to decoded VideoFrame).
>
> BUG=
>
> Committed: https://crrev.com/5249599a9b69ad9c2d513210d694719f1011f977
> Cr-Commit-Position: refs/heads/master@{#11901}

TBR=stefan@webrtc.org,pbos@webrtc.org
# Not skipping CQ checks because original CL landed more than 1 days ago.
BUG=chromium:603838

Review URL: https://codereview.webrtc.org/1893543003

Cr-Commit-Position: refs/heads/master@{#12400}
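For readers skimming the diff below, here is a minimal standalone sketch of the two delay computations involved; the helper function names are illustrative only and do not exist in the tree. The value restored by this revert is sampled in ReceiveStatisticsProxy::OnDecoderTiming() (see that hunk), while the reverted CL sampled an estimated one-way delay per rendered frame from the decoded frame's NTP capture time (see the OnRenderedFrame() hunk).

#include <cstdint>

// Restored by this revert: network delay (rtt/2) + target_delay_ms
// (jitter delay + decode time + render delay).
int64_t DelaySampleFromDecoderTiming(int target_delay_ms, int64_t rtt_ms) {
  return target_delay_ms + rtt_ms / 2;
}

// Removed by this revert: estimated one-way delay, i.e. the receiver's NTP
// "now" minus the frame's estimated NTP capture time. Only non-negative
// samples were added to the histogram.
int64_t DelaySampleFromNtpCaptureTime(int64_t now_ntp_ms, int64_t frame_ntp_ms) {
  return now_ntp_ms - frame_ntp_ms;
}

The EndToEndTest::VerifyHistogramStats hunk correspondingly drops the rtcp_xr.receiver_reference_time_report setting which, per the removed comment, was only needed so RemoteNtpTimeEstimator could obtain an RTT for the receive stream.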
@@ -914,16 +914,14 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image,
   // It may be released by libvpx during future vpx_codec_decode or
   // vpx_codec_destroy calls.
   img = vpx_codec_get_frame(decoder_, &iter);
-  int ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_);
+  int ret = ReturnFrame(img, input_image._timeStamp);
   if (ret != 0) {
     return ret;
   }
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
-                                uint32_t timestamp,
-                                int64_t ntp_time_ms) {
+int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
   if (img == NULL) {
     // Decoder OK and NULL image => No show frame.
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -950,7 +948,6 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
   VideoFrame decoded_image;
   decoded_image.set_video_frame_buffer(img_wrapped_buffer);
   decoded_image.set_timestamp(timestamp);
-  decoded_image.set_ntp_time_ms(ntp_time_ms);
   int ret = decode_complete_callback_->Decoded(decoded_image);
   if (ret != 0)
     return ret;
@@ -153,9 +153,7 @@ class VP9DecoderImpl : public VP9Decoder {
   const char* ImplementationName() const override;
 
  private:
-  int ReturnFrame(const vpx_image_t* img,
-                  uint32_t timestamp,
-                  int64_t ntp_time_ms);
+  int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
 
   // Memory pool used to share buffers between libvpx and webrtc.
   Vp9FrameBufferPool frame_buffer_pool_;
@@ -2160,8 +2160,6 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
       (*receive_configs)[0].rtp.rtx[kFakeVideoSendPayloadType].payload_type =
           kSendRtxPayloadType;
     }
-    // RTT needed for RemoteNtpTimeEstimator for the receive stream.
-    (*receive_configs)[0].rtp.rtcp_xr.receiver_reference_time_report = true;
     encoder_config->content_type =
         screenshare_ ? VideoEncoderConfig::ContentType::kScreen
                      : VideoEncoderConfig::ContentType::kRealtimeVideo;
@@ -170,6 +170,9 @@ void ReceiveStatisticsProxy::OnDecoderTiming(int decode_ms,
   stats_.min_playout_delay_ms = min_playout_delay_ms;
   stats_.render_delay_ms = render_delay_ms;
   decode_time_counter_.Add(decode_ms);
+  // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time +
+  // render delay).
+  delay_counter_.Add(target_delay_ms + rtt_ms / 2);
 }
 
 void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated(
@@ -226,9 +229,7 @@ void ReceiveStatisticsProxy::OnDecodedFrame() {
   stats_.decode_frame_rate = decode_fps_estimator_.Rate(now);
 }
 
-void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
-  int width = frame.width();
-  int height = frame.height();
+void ReceiveStatisticsProxy::OnRenderedFrame(int width, int height) {
   RTC_DCHECK_GT(width, 0);
   RTC_DCHECK_GT(height, 0);
   uint64_t now = clock_->TimeInMilliseconds();
@@ -240,12 +241,6 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
   render_height_counter_.Add(height);
   render_fps_tracker_.AddSamples(1);
   render_pixel_tracker_.AddSamples(sqrt(width * height));
-
-  if (frame.ntp_time_ms() > 0) {
-    int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
-    if (delay_ms >= 0)
-      delay_counter_.Add(delay_ms);
-  }
 }
 
 void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t sync_offset_ms) {
@@ -44,8 +44,8 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
   VideoReceiveStream::Stats GetStats() const;
 
   void OnDecodedFrame();
-  void OnRenderedFrame(const VideoFrame& frame);
   void OnSyncOffsetUpdated(int64_t sync_offset_ms);
+  void OnRenderedFrame(int width, int height);
   void OnIncomingPayloadType(int payload_type);
   void OnDecoderImplementationName(const char* implementation_name);
   void OnIncomingRate(unsigned int framerate, unsigned int bitrate_bps);
@@ -385,7 +385,7 @@ int VideoReceiveStream::RenderFrame(const uint32_t /*stream_id*/,
   if (config_.renderer)
     config_.renderer->OnFrame(video_frame);
 
-  stats_proxy_.OnRenderedFrame(video_frame);
+  stats_proxy_.OnRenderedFrame(video_frame.width(), video_frame.height());
 
   return 0;
 }