Calculate max/avg encode/decode latency in codec tests
Bug: none
Change-Id: Ie42461dd06b1764c99308393477921ea25319ab4
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/251687
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Sergey Silkin <ssilkin@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#36007}
commit 0b02d637c0 (parent b92d3e6ef9), committed by WebRTC LUCI CQ
@@ -101,6 +101,11 @@ class VideoCodecTestStats
   float enc_speed_fps = 0.0f;
   float dec_speed_fps = 0.0f;
 
+  float avg_encode_latency_sec = 0.0f;
+  float max_encode_latency_sec = 0.0f;
+  float avg_decode_latency_sec = 0.0f;
+  float max_decode_latency_sec = 0.0f;
+
   float avg_delay_sec = 0.0f;
   float max_key_frame_delay_sec = 0.0f;
   float max_delta_frame_delay_sec = 0.0f;
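The four new fields sit between the existing speed and delay metrics of the stats struct, all expressed in seconds. As a usage sketch only (the trimmed struct copy and the PrintLatency() helper below are hypothetical illustrations, not part of this patch), a test could report the new values like this:

    #include <cstdio>

    // Illustrative, trimmed copy of the VideoStatistics fields touched here.
    struct VideoStatistics {
      float enc_speed_fps = 0.0f;
      float dec_speed_fps = 0.0f;
      float avg_encode_latency_sec = 0.0f;
      float max_encode_latency_sec = 0.0f;
      float avg_decode_latency_sec = 0.0f;
      float max_decode_latency_sec = 0.0f;
    };

    void PrintLatency(const VideoStatistics& stat) {
      // Latencies are stored in seconds; print in milliseconds for readability.
      std::printf("encode: avg %.2f ms, max %.2f ms\n",
                  stat.avg_encode_latency_sec * 1000.0f,
                  stat.max_encode_latency_sec * 1000.0f);
      std::printf("decode: avg %.2f ms, max %.2f ms\n",
                  stat.avg_decode_latency_sec * 1000.0f,
                  stat.max_decode_latency_sec * 1000.0f);
    }

Note the relationship to the existing speed fields visible in the second hunk: dec_speed_fps is computed as 1000000.0f / mean_decode_time_us, so avg_decode_latency_sec is effectively its reciprocal.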
@@ -332,6 +332,16 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic(
           ? 1000000.0f / mean_decode_time_us
           : std::numeric_limits<float>::max();
 
+  video_stat.avg_encode_latency_sec =
+      frame_encoding_time_us.GetMean().value_or(0) / 1000000.0f;
+  video_stat.max_encode_latency_sec =
+      frame_encoding_time_us.GetMax().value_or(0) / 1000000.0f;
+
+  video_stat.avg_decode_latency_sec =
+      frame_decoding_time_us.GetMean().value_or(0) / 1000000.0f;
+  video_stat.max_decode_latency_sec =
+      frame_decoding_time_us.GetMax().value_or(0) / 1000000.0f;
+
   auto MaxDelaySec = [target_bitrate_kbps](
       const webrtc_impl::RunningStatistics<size_t>& stats) {
     return 8 * stats.GetMax().value_or(0) / 1000 / target_bitrate_kbps;
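Per-frame encode and decode times are accumulated in microseconds; GetMean() and GetMax() return optionals that are empty when no samples were recorded, so value_or(0) guards the empty case before the microseconds-to-seconds division. A self-contained sketch of the same conversion, assuming a simplified stand-in for webrtc_impl::RunningStatistics (the RunningStats class below is a hypothetical reduction, not the real WebRTC class):

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <optional>

    // Hypothetical, minimal stand-in for webrtc_impl::RunningStatistics:
    // only the GetMean()/GetMax() surface used by the patch is mirrored.
    class RunningStats {
     public:
      void AddSample(int64_t v) {
        sum_ += v;
        ++count_;
        max_ = max_ ? std::max(*max_, v) : v;
      }
      std::optional<double> GetMean() const {
        if (count_ == 0) return std::nullopt;
        return static_cast<double>(sum_) / count_;
      }
      std::optional<int64_t> GetMax() const { return max_; }

     private:
      int64_t sum_ = 0;
      int64_t count_ = 0;
      std::optional<int64_t> max_;
    };

    int main() {
      RunningStats frame_encoding_time_us;
      for (int64_t us : {12000, 15000, 9000}) frame_encoding_time_us.AddSample(us);

      // Same conversion as in the patch: microseconds -> seconds, with
      // value_or(0) covering the no-samples case.
      float avg_encode_latency_sec =
          frame_encoding_time_us.GetMean().value_or(0) / 1000000.0f;
      float max_encode_latency_sec =
          frame_encoding_time_us.GetMax().value_or(0) / 1000000.0f;
      std::printf("avg %.3f s, max %.3f s\n", avg_encode_latency_sec,
                  max_encode_latency_sec);
    }

The MaxDelaySec lambda in the surrounding context lines converts a worst-case frame size into a transmission delay: 8 * bytes gives bits, dividing by 1000 gives kilobits, and dividing by target_bitrate_kbps (kbit/s) gives seconds. For example, a 25000-byte frame at 1000 kbps yields 8 * 25000 / 1000 / 1000 = 0.2 s.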