Add frame receive to frame rendered metric to video_quality_analyzer

Bug: webrtc:10975
Change-Id: I6b36566efbbb52d27ca6cb44cb3b40aaf0cacb7b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/153660
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Artem Titov <titovartem@webrtc.org>
Commit-Queue: Johannes Kron <kron@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#29243}
committed by Commit Bot
parent f0be5b5380
commit c12db81e79
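
For orientation, the change replaces the single received/decoded timestamp pair with a finer per-frame timeline and derives two metrics from it: decode_time (decode start to decode end) and receive_to_render_time (arrival of the frame's last packet to rendering). The sketch below is illustrative only, a simplified stand-in for the FrameStats fields and formulas in the hunks further down (plain int64_t milliseconds instead of webrtc::Timestamp):

#include <cstdint>

// Simplified stand-in for DefaultVideoQualityAnalyzer::FrameStats; all values
// are milliseconds on the same clock.
struct FrameTimelineMs {
  int64_t captured_time;
  int64_t received_time;      // arrival of the last packet of the frame
  int64_t decode_start_time;  // set in OnFramePreDecode (was OnFrameReceived)
  int64_t decode_end_time;    // set in OnFrameDecoded
  int64_t rendered_time;      // set in OnFrameRendered
};

// decode_time now covers only the decoder, and receive_to_render_time covers
// last-packet arrival up to rendering, matching ProcessComparison() below.
int64_t DecodeTimeMs(const FrameTimelineMs& f) {
  return f.decode_end_time - f.decode_start_time;
}

int64_t ReceiveToRenderTimeMs(const FrameTimelineMs& f) {
  return f.rendered_time - f.received_time;
}
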
@@ -78,8 +78,8 @@ class VideoQualityAnalyzerInterface : public StatsObserverInterface {
   // Will be called for each frame dropped by encoder.
   virtual void OnFrameDropped(EncodedImageCallback::DropReason reason) {}
   // Will be called before calling the decoder.
-  virtual void OnFrameReceived(uint16_t frame_id,
-                               const EncodedImage& encoded_image) {}
+  virtual void OnFramePreDecode(uint16_t frame_id,
+                                const EncodedImage& encoded_image) {}
   // Will be called after decoding the frame. |decode_time_ms| is a decode
   // time provided by decoder itself. If decoder doesn't produce such
   // information can be omitted.

@@ -187,7 +187,7 @@ void DefaultVideoQualityAnalyzer::OnFrameDropped(
   // Here we do nothing, because we will see this drop on renderer side.
 }

-void DefaultVideoQualityAnalyzer::OnFrameReceived(
+void DefaultVideoQualityAnalyzer::OnFramePreDecode(
     uint16_t frame_id,
     const webrtc::EncodedImage& input_image) {
   rtc::CritScope crit(&lock_);

@@ -198,7 +198,17 @@ void DefaultVideoQualityAnalyzer::OnFrameReceived(
       << it->second.stream_label;
   frame_counters_.received++;
   stream_frame_counters_[it->second.stream_label].received++;
-  it->second.received_time = Now();
+  it->second.decode_start_time = Now();
+  // Determine the time of the last received packet of this video frame.
+  RTC_DCHECK(!input_image.PacketInfos().empty());
+  int64_t last_receive_time =
+      std::max_element(input_image.PacketInfos().cbegin(),
+                       input_image.PacketInfos().cend(),
+                       [](const RtpPacketInfo& a, const RtpPacketInfo& b) {
+                         return a.receive_time_ms() < b.receive_time_ms();
+                       })
+          ->receive_time_ms();
+  it->second.received_time = Timestamp::ms(last_receive_time);
 }

 void DefaultVideoQualityAnalyzer::OnFrameDecoded(

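The received_time above is now the arrival time of the frame's last packet, picked with std::max_element over input_image.PacketInfos(). A self-contained sketch of the same idiom, using a simplified stand-in struct instead of webrtc::RtpPacketInfo:

#include <algorithm>
#include <cstdint>
#include <vector>

// Simplified stand-in for webrtc::RtpPacketInfo; only the receive time matters here.
struct PacketInfoMs {
  int64_t receive_time_ms;
};

// Returns the receive time of the packet that arrived last; callers must pass
// a non-empty vector, mirroring the RTC_DCHECK in the hunk above.
int64_t LastReceiveTimeMs(const std::vector<PacketInfoMs>& packets) {
  return std::max_element(packets.cbegin(), packets.cend(),
                          [](const PacketInfoMs& a, const PacketInfoMs& b) {
                            return a.receive_time_ms < b.receive_time_ms;
                          })
      ->receive_time_ms;
}
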
@@ -210,7 +220,7 @@ void DefaultVideoQualityAnalyzer::OnFrameDecoded(
   RTC_DCHECK(it != frame_stats_.end());
   frame_counters_.decoded++;
   stream_frame_counters_[it->second.stream_label].decoded++;
-  it->second.decoded_time = Now();
+  it->second.decode_end_time = Now();
 }

 void DefaultVideoQualityAnalyzer::OnFrameRendered(

@@ -542,7 +552,9 @@ void DefaultVideoQualityAnalyzer::ProcessComparison(
     stats->total_delay_incl_transport_ms.AddSample(
         (frame_stats.rendered_time - frame_stats.captured_time).ms());
     stats->decode_time_ms.AddSample(
-        (frame_stats.decoded_time - frame_stats.received_time).ms());
+        (frame_stats.decode_end_time - frame_stats.decode_start_time).ms());
+    stats->receive_to_render_time_ms.AddSample(
+        (frame_stats.rendered_time - frame_stats.received_time).ms());

     if (frame_stats.prev_frame_rendered_time.IsFinite()) {
       TimeDelta time_between_rendered_frames =

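A worked example with made-up timestamps, to make the two samples above concrete:

#include <cassert>
#include <cstdint>

int main() {
  // Hypothetical per-frame timestamps in milliseconds (made up for illustration).
  const int64_t received_time = 1000;      // last packet of the frame arrived
  const int64_t decode_start_time = 1002;  // OnFramePreDecode
  const int64_t decode_end_time = 1007;    // OnFrameDecoded
  const int64_t rendered_time = 1030;      // OnFrameRendered

  assert(decode_end_time - decode_start_time == 5);  // decode_time_ms sample
  assert(rendered_time - received_time == 30);       // receive_to_render_time_ms sample
  return 0;
}
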
@@ -643,6 +655,8 @@ void DefaultVideoQualityAnalyzer::ReportResults(
                stats.psnr.IsEmpty() ? 0 : stats.psnr.GetMin(), "dB",
                /*important=*/false);
   ReportResult("decode_time", test_case_name, stats.decode_time_ms, "ms");
+  ReportResult("receive_to_render_time", test_case_name,
+               stats.receive_to_render_time_ms, "ms");
   test::PrintResult("dropped_frames", "", test_case_name,
                     frame_counters.dropped, "count",
                     /*important=*/false);

@@ -81,6 +81,8 @@ struct StreamStats {
   RateCounter encode_frame_rate;
   SamplesStatsCounter encode_time_ms;
   SamplesStatsCounter decode_time_ms;
+  // Time from last packet of frame is received until it's sent to the renderer.
+  SamplesStatsCounter receive_to_render_time_ms;
   // Max frames skipped between two nearest.
   SamplesStatsCounter skipped_between_rendered;
   // In the next 2 metrics freeze is a pause that is longer, than maximum:

@@ -133,8 +135,8 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
   void OnFrameEncoded(uint16_t frame_id,
                       const EncodedImage& encoded_image) override;
   void OnFrameDropped(EncodedImageCallback::DropReason reason) override;
-  void OnFrameReceived(uint16_t frame_id,
-                       const EncodedImage& input_image) override;
+  void OnFramePreDecode(uint16_t frame_id,
+                        const EncodedImage& input_image) override;
   void OnFrameDecoded(const VideoFrame& frame,
                       absl::optional<int32_t> decode_time_ms,
                       absl::optional<uint8_t> qp) override;

@@ -172,8 +174,10 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
     Timestamp captured_time;
     Timestamp pre_encode_time = Timestamp::MinusInfinity();
     Timestamp encoded_time = Timestamp::MinusInfinity();
+    // Time when last packet of a frame was received.
     Timestamp received_time = Timestamp::MinusInfinity();
-    Timestamp decoded_time = Timestamp::MinusInfinity();
+    Timestamp decode_start_time = Timestamp::MinusInfinity();
+    Timestamp decode_end_time = Timestamp::MinusInfinity();
     Timestamp rendered_time = Timestamp::MinusInfinity();
     Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();

@@ -64,7 +64,7 @@ void ExampleVideoQualityAnalyzer::OnFrameDropped(
   ++frames_dropped_;
 }

-void ExampleVideoQualityAnalyzer::OnFrameReceived(
+void ExampleVideoQualityAnalyzer::OnFramePreDecode(
     uint16_t frame_id,
     const webrtc::EncodedImage& encoded_image) {
   rtc::CritScope crit(&lock_);

@@ -40,8 +40,8 @@ class ExampleVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
   void OnFrameEncoded(uint16_t frame_id,
                       const EncodedImage& encoded_image) override;
   void OnFrameDropped(EncodedImageCallback::DropReason reason) override;
-  void OnFrameReceived(uint16_t frame_id,
-                       const EncodedImage& encoded_image) override;
+  void OnFramePreDecode(uint16_t frame_id,
+                        const EncodedImage& encoded_image) override;
   void OnFrameDecoded(const VideoFrame& frame,
                       absl::optional<int32_t> decode_time_ms,
                       absl::optional<uint8_t> qp) override;

@@ -87,7 +87,7 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image,
   // We can safely dereference |origin_image|, because it can be removed from
   // the map only after |delegate_| Decode method will be invoked. Image will be
   // removed inside DecodedImageCallback, which can be done on separate thread.
-  analyzer_->OnFrameReceived(out.id, *origin_image);
+  analyzer_->OnFramePreDecode(out.id, *origin_image);
   int32_t result =
       delegate_->Decode(*origin_image, missing_frames, render_time_ms);
   if (result != WEBRTC_VIDEO_CODEC_OK) {