diff --git a/api/test/video_quality_analyzer_interface.h b/api/test/video_quality_analyzer_interface.h index 49f4dd1ff3..ab896f7dcf 100644 --- a/api/test/video_quality_analyzer_interface.h +++ b/api/test/video_quality_analyzer_interface.h @@ -131,9 +131,14 @@ class VideoQualityAnalyzerInterface // All available codes are listed in // modules/video_coding/include/video_error_codes.h // `peer_name` is name of the peer on which side error acquired. + // TODO(titovartem): remove this method when downstreams are migrated. virtual void OnDecoderError(absl::string_view peer_name, uint16_t frame_id, int32_t error_code) {} + virtual void OnDecoderError(absl::string_view peer_name, + uint16_t frame_id, + int32_t error_code, + const DecoderStats& stats) {} // Will be called every time new stats reports are available for the // Peer Connection identified by `pc_label`. void OnStatsReports( diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc index 81231d71c7..b758c92d19 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc @@ -43,13 +43,22 @@ constexpr absl::string_view kSkipRenderedFrameReasonDropped = "considered dropped"; void LogFrameCounters(const std::string& name, const FrameCounters& counters) { - RTC_LOG(LS_INFO) << "[" << name << "] Captured : " << counters.captured; - RTC_LOG(LS_INFO) << "[" << name << "] Pre encoded : " << counters.pre_encoded; - RTC_LOG(LS_INFO) << "[" << name << "] Encoded : " << counters.encoded; - RTC_LOG(LS_INFO) << "[" << name << "] Received : " << counters.received; - RTC_LOG(LS_INFO) << "[" << name << "] Decoded : " << counters.decoded; - RTC_LOG(LS_INFO) << "[" << name << "] Rendered : " << counters.rendered; - RTC_LOG(LS_INFO) << "[" << name << "] Dropped : " << counters.dropped; + RTC_LOG(LS_INFO) << "[" << name + << "] Captured : " << counters.captured; + RTC_LOG(LS_INFO) << "[" << name + << "] Pre encoded : " << counters.pre_encoded; + RTC_LOG(LS_INFO) << "[" << name + << "] Encoded : " << counters.encoded; + RTC_LOG(LS_INFO) << "[" << name + << "] Received : " << counters.received; + RTC_LOG(LS_INFO) << "[" << name + << "] Decoded : " << counters.decoded; + RTC_LOG(LS_INFO) << "[" << name + << "] Rendered : " << counters.rendered; + RTC_LOG(LS_INFO) << "[" << name + << "] Dropped : " << counters.dropped; + RTC_LOG(LS_INFO) << "[" << name + << "] Failed to decode : " << counters.failed_to_decode; } void LogStreamInternalStats(const std::string& name, @@ -392,7 +401,7 @@ void DefaultVideoQualityAnalyzer::OnFrameDecoded( it->second.HasDecodeEndTime(peer_index)) { // It means this frame was decoded before, so we can skip it. It may happen // when we have multiple simulcast streams in one track and received - // the same picture from two different streams because SFU can't reliably + // the same frame from two different streams because SFU can't reliably // correlate two simulcast streams and started relaying the second stream // from the same frame it has relayed right before for the first stream. 
return; @@ -529,9 +538,45 @@ void DefaultVideoQualityAnalyzer::OnEncoderError( void DefaultVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name, uint16_t frame_id, - int32_t error_code) { + int32_t error_code, + const DecoderStats& stats) { RTC_LOG(LS_ERROR) << "Decoder error for frame_id=" << frame_id << ", code=" << error_code; + + MutexLock lock(&mutex_); + RTC_CHECK_EQ(state_, State::kActive) + << "DefaultVideoQualityAnalyzer has to be started before use"; + + size_t peer_index = peers_->index(peer_name); + + if (frame_id == VideoFrame::kNotSetId) { + frame_counters_.failed_to_decode++; + unknown_sender_frame_counters_[std::string(peer_name)].failed_to_decode++; + return; + } + + auto it = captured_frames_in_flight_.find(frame_id); + if (it == captured_frames_in_flight_.end() || + it->second.HasDecodeEndTime(peer_index)) { + // It means this frame was decoded before, so we can skip it. It may happen + // when we have multiple simulcast streams in one track and received + // the same frame from two different streams because SFU can't reliably + // correlate two simulcast streams and started relaying the second stream + // from the same frame it has relayed right before for the first stream. + return; + } + frame_counters_.failed_to_decode++; + InternalStatsKey key(it->second.stream(), + stream_to_sender_.at(it->second.stream()), peer_index); + stream_frame_counters_.at(key).failed_to_decode++; + Timestamp now = Now(); + StreamCodecInfo used_decoder; + used_decoder.codec_name = stats.decoder_name; + used_decoder.first_frame_id = frame_id; + used_decoder.last_frame_id = frame_id; + used_decoder.switched_on_at = now; + used_decoder.switched_from_at = now; + it->second.OnDecoderError(peer_index, used_decoder); } void DefaultVideoQualityAnalyzer::RegisterParticipantInCall( diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h index 44bcd28ef9..7b56ce985a 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h @@ -78,7 +78,8 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { int32_t error_code) override; void OnDecoderError(absl::string_view peer_name, uint16_t frame_id, - int32_t error_code) override; + int32_t error_code, + const DecoderStats& stats) override; void RegisterParticipantInCall(absl::string_view peer_name) override; void UnregisterParticipantInCall(absl::string_view peer_name) override; diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc index 13e77b4586..b6031fdec9 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc @@ -127,11 +127,17 @@ bool FrameInFlight::HasReceivedTime(size_t peer) const { void FrameInFlight::OnFrameDecoded(size_t peer, webrtc::Timestamp time, - StreamCodecInfo used_decoder) { + const StreamCodecInfo& used_decoder) { receiver_stats_[peer].decode_end_time = time; receiver_stats_[peer].used_decoder = used_decoder; } +void FrameInFlight::OnDecoderError(size_t peer, + const StreamCodecInfo& used_decoder) { + receiver_stats_[peer].decoder_failed = true; + receiver_stats_[peer].used_decoder = used_decoder; +} + bool FrameInFlight::HasDecodeEndTime(size_t peer) const { auto it = receiver_stats_.find(peer); if (it == 
receiver_stats_.end()) { @@ -187,6 +193,7 @@ FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const { stats.used_decoder = receiver_stats->used_decoder; stats.pre_decoded_frame_type = receiver_stats->frame_type; stats.pre_decoded_image_size = receiver_stats->encoded_image_size; + stats.decoder_failed = receiver_stats->decoder_failed; } return stats; } diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h index a1ce8faf5b..9ce2e010a8 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h @@ -44,6 +44,7 @@ struct ReceiverFrameStats { absl::optional<StreamCodecInfo> used_decoder = absl::nullopt; bool dropped = false; + bool decoder_failed = false; }; // Represents a frame which was sent by sender and is currently on the way to @@ -100,7 +101,8 @@ class FrameInFlight { void OnFrameDecoded(size_t peer, webrtc::Timestamp time, - StreamCodecInfo used_decoder); + const StreamCodecInfo& used_decoder); + void OnDecoderError(size_t peer, const StreamCodecInfo& used_decoder); bool HasDecodeEndTime(size_t peer) const; diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc index a6e5070ce2..ae311f418f 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc @@ -77,6 +77,8 @@ FrameComparison ValidateFrameComparison(FrameComparison comparison) { << "Regular comparison has to have used_encoder"; RTC_DCHECK(comparison.frame_stats.used_decoder.has_value()) << "Regular comparison has to have used_decoder"; + RTC_DCHECK(!comparison.frame_stats.decoder_failed) + << "Regular comparison can't have decoder failure"; break; case FrameComparisonType::kDroppedFrame: // Frame can be dropped before encoder, by encoder, inside network or @@ -95,26 +97,27 @@ FrameComparison ValidateFrameComparison(FrameComparison comparison) { << "when encoded_time is finite."; } - if (comparison.frame_stats.decode_end_time.IsFinite()) { + if (comparison.frame_stats.decode_end_time.IsFinite() || + comparison.frame_stats.decoder_failed) { RTC_DCHECK(comparison.frame_stats.received_time.IsFinite()) << "Dropped frame comparison has to have received_time when " - << "decode_end_time is set"; + << "decode_end_time is set or decoder_failed is true"; RTC_DCHECK(comparison.frame_stats.decode_start_time.IsFinite()) << "Dropped frame comparison has to have decode_start_time when " - << "decode_end_time is set"; + << "decode_end_time is set or decoder_failed is true"; RTC_DCHECK(comparison.frame_stats.used_decoder.has_value()) << "Dropped frame comparison has to have used_decoder when " - << "decode_end_time is set"; + << "decode_end_time is set or decoder_failed is true"; } else { RTC_DCHECK(!comparison.frame_stats.received_time.IsFinite()) << "Dropped frame comparison can't have received_time when " - << "decode_end_time is not set"; + << "decode_end_time is not set and there were no decoder failures"; RTC_DCHECK(!comparison.frame_stats.decode_start_time.IsFinite()) << "Dropped frame comparison can't have decode_start_time when " - << "decode_end_time is not set"; + << "decode_end_time is not set and there were no decoder failures"; RTC_DCHECK(!comparison.frame_stats.used_decoder.has_value()) <<
"Dropped frame comparison can't have used_decoder when " - << "decode_end_time is not set"; + << "decode_end_time is not set and there were no decoder failures"; } RTC_DCHECK(!comparison.frame_stats.rendered_time.IsFinite()) << "Dropped frame comparison can't have rendered_time"; @@ -138,13 +141,15 @@ FrameComparison ValidateFrameComparison(FrameComparison comparison) { RTC_DCHECK(!comparison.frame_stats.rendered_frame_height.has_value()) << "Frame in flight comparison can't have rendered_frame_height"; - if (comparison.frame_stats.decode_end_time.IsFinite()) { + if (comparison.frame_stats.decode_end_time.IsFinite() || + comparison.frame_stats.decoder_failed) { RTC_DCHECK(comparison.frame_stats.used_decoder.has_value()) << "Frame in flight comparison has to have used_decoder when " - << "decode_end_time is set"; + << "decode_end_time is set or decoder_failed is true."; RTC_DCHECK(comparison.frame_stats.decode_start_time.IsFinite()) << "Frame in flight comparison has to have finite " - << "decode_start_time when decode_end_time is finite."; + << "decode_start_time when decode_end_time is finite or " + << "decoder_failed is true."; } if (comparison.frame_stats.decode_start_time.IsFinite()) { RTC_DCHECK(comparison.frame_stats.received_time.IsFinite()) @@ -415,6 +420,9 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison( FrameDropPhase dropped_phase; if (frame_stats.decode_end_time.IsFinite()) { dropped_phase = FrameDropPhase::kAfterDecoder; + } else if (frame_stats.decode_start_time.IsFinite() && + frame_stats.decoder_failed) { + dropped_phase = FrameDropPhase::kByDecoder; } else if (frame_stats.encoded_time.IsFinite()) { dropped_phase = FrameDropPhase::kTransport; } else if (frame_stats.pre_encode_time.IsFinite()) { @@ -441,7 +449,8 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison( } } // Next stats can be calculated only if frame was received on remote side. 
- if (comparison.type != FrameComparisonType::kDroppedFrame) { + if (comparison.type != FrameComparisonType::kDroppedFrame || + comparison.frame_stats.decoder_failed) { if (frame_stats.rendered_time.IsFinite()) { stats->resolution_of_rendered_frame.AddSample( StatsSample(*comparison.frame_stats.rendered_frame_width * diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc index b9b822072f..bd0f75d0b4 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc @@ -13,9 +13,11 @@ #include #include +#include "api/test/create_frame_generator.h" #include "api/units/timestamp.h" #include "rtc_base/strings/string_builder.h" #include "system_wrappers/include/clock.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h" @@ -23,6 +25,9 @@ namespace webrtc { namespace { +using ::testing::Eq; +using ::testing::IsEmpty; + using StatsSample = ::webrtc::SamplesStatsCounter::StatsSample; constexpr int kMaxFramesInFlightPerStream = 10; @@ -36,6 +41,24 @@ DefaultVideoQualityAnalyzerOptions AnalyzerOptionsForTest() { return options; } +VideoFrame CreateFrame(uint16_t frame_id, + int width, + int height, + Timestamp timestamp) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(width, height, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + test::FrameGeneratorInterface::VideoFrameData frame_data = + frame_generator->NextFrame(); + return VideoFrame::Builder() + .set_id(frame_id) + .set_video_frame_buffer(frame_data.buffer) + .set_update_rect(frame_data.update_rect) + .set_timestamp_us(timestamp.us()) + .build(); +} + StreamCodecInfo Vp8CodecForOneFrame(uint16_t frame_id, Timestamp time) { StreamCodecInfo info; info.codec_name = "VP8"; @@ -94,6 +117,17 @@ std::string ToString(const SamplesStatsCounter& counter) { return out.str(); } +void expectEmpty(const SamplesStatsCounter& counter) { + EXPECT_TRUE(counter.IsEmpty()) + << "Expected empty SamplesStatsCounter, but got " << ToString(counter); +} + +void expectEmpty(const SamplesRateCounter& counter) { + EXPECT_TRUE(counter.IsEmpty()) + << "Expected empty SamplesRateCounter, but got " + << counter.GetEventsPerSecond(); +} + TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, StatsPresentedAfterAddingOneComparison) { DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; @@ -110,7 +144,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, FrameStats frame_stats = FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(stream_start_time); - comparator.Start(1); + comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, peers_count, stream_start_time, stream_start_time); comparator.AddComparison(stats_key, @@ -150,7 +184,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, stream_start_time + TimeDelta::Millis(15)); frame_stats2.prev_frame_rendered_time = frame_stats1.rendered_time; - comparator.Start(1); + comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, peers_count, stream_start_time, stream_start_time); comparator.AddComparison(stats_key, @@ -230,7 +264,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, 
frame_stats.rendered_frame_height = 10; stats.push_back(frame_stats); - comparator.Start(1); + comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, peers_count, stream_start_time, stream_start_time); for (size_t i = 0; i < stats.size() - 1; ++i) { @@ -279,5 +313,994 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, << "There should be 4 events with interval of 15 ms"; } +// Tests to validate that stats for each possible input frame are computed +// correctly. +// Frame in flight start +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + CapturedOnlyInFlightFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kFrameInFlight, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + expectEmpty(stats.encode_time_ms); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 0); + EXPECT_EQ(stats.num_send_key_frames, 0); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_THAT(stats.encoders, IsEmpty()); + EXPECT_THAT(stats.decoders, IsEmpty()); +} + +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + PreEncodedInFlightFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, 
captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kFrameInFlight, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + expectEmpty(stats.encode_time_ms); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 0); + EXPECT_EQ(stats.num_send_key_frames, 0); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_THAT(stats.encoders, IsEmpty()); + EXPECT_THAT(stats.decoders, IsEmpty()); +} + +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + EncodedInFlightKeyFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kFrameInFlight, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + 
expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 1); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_THAT(stats.decoders, IsEmpty()); +} + +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + EncodedInFlightDeltaFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameDelta; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kFrameInFlight, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 0); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_THAT(stats.decoders, IsEmpty()); +} + 
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + PreDecodedInFlightKeyFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + // Frame pre decoded + frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.pre_decoded_image_size = DataSize::Bytes(500); + frame_stats.received_time = captured_time + TimeDelta::Millis(30); + frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kFrameInFlight, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.transport_time_ms), 20.0); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 1); + EXPECT_EQ(stats.num_recv_key_frames, 1); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_THAT(stats.decoders, IsEmpty()); +} + +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + DecodedInFlightKeyFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t 
sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + // Frame pre decoded + frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.pre_decoded_image_size = DataSize::Bytes(500); + frame_stats.received_time = captured_time + TimeDelta::Millis(30); + frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40); + // Frame decoded + frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50); + frame_stats.used_decoder = + Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kFrameInFlight, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.transport_time_ms), 20.0); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.decode_time_ms), 10.0); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 1); + EXPECT_EQ(stats.num_recv_key_frames, 1); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_EQ(stats.decoders, + std::vector{*frame_stats.used_decoder}); +} + +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + DecoderFailureOnInFlightKeyFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + 
frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + // Frame pre decoded + frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.pre_decoded_image_size = DataSize::Bytes(500); + frame_stats.received_time = captured_time + TimeDelta::Millis(30); + frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40); + // Frame decoded + frame_stats.decoder_failed = true; + frame_stats.used_decoder = + Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kFrameInFlight, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.transport_time_ms), 20.0); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 1); + EXPECT_EQ(stats.num_recv_key_frames, 1); + // All frame in flight are not considered as dropped. 
+ EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_EQ(stats.decoders, + std::vector{*frame_stats.used_decoder}); +} +// Frame in flight end + +// Dropped frame start +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + CapturedOnlyDroppedFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kDroppedFrame, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + expectEmpty(stats.encode_time_ms); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 0); + EXPECT_EQ(stats.num_send_key_frames, 0); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 1}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_THAT(stats.encoders, IsEmpty()); + EXPECT_THAT(stats.decoders, IsEmpty()); +} + +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + PreEncodedDroppedFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + 
FrameComparisonType::kDroppedFrame, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + expectEmpty(stats.encode_time_ms); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 0); + EXPECT_EQ(stats.num_send_key_frames, 0); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 1}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_THAT(stats.encoders, IsEmpty()); + EXPECT_THAT(stats.decoders, IsEmpty()); +} + +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + EncodedDroppedKeyFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kDroppedFrame, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + 
EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 1); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 1}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_THAT(stats.decoders, IsEmpty()); +} + +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + EncodedDroppedDeltaFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameDelta; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kDroppedFrame, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 0); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 1}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_THAT(stats.decoders, IsEmpty()); +} + +// TODO(titovartem): add test that just pre decoded frame can't be received as +// dropped one because decoder 
always returns either decoded frame or error. + +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + DecodedDroppedKeyFrameAccountedInStats) { + // We don't really drop frames after decoder, so it's a bit unclear what is + // correct way to account such frames in stats, so this test just fixes some + // current way. + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + // Frame pre decoded + frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.pre_decoded_image_size = DataSize::Bytes(500); + frame_stats.received_time = captured_time + TimeDelta::Millis(30); + frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40); + // Frame decoded + frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50); + frame_stats.used_decoder = + Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kDroppedFrame, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 1); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 1}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_EQ(stats.decoders, + std::vector{*frame_stats.used_decoder}); +} + 
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + DecoderFailedDroppedKeyFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + // Frame pre decoded + frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.pre_decoded_image_size = DataSize::Bytes(500); + frame_stats.received_time = captured_time + TimeDelta::Millis(30); + frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40); + // Frame decoded + frame_stats.decoder_failed = true; + frame_stats.used_decoder = + Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kDroppedFrame, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.transport_time_ms), 20.0); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_rendered_frame); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 1); + EXPECT_EQ(stats.num_recv_key_frames, 1); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 1}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_EQ(stats.decoders, + std::vector{*frame_stats.used_decoder}); +} +// Dropped frame end + +// Regular frame start +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + RenderedKeyFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator 
comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + VideoFrame frame = + CreateFrame(frame_id, /*width=*/320, /*height=*/180, captured_time); + FrameStats frame_stats(captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + // Frame pre decoded + frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.pre_decoded_image_size = DataSize::Bytes(500); + frame_stats.received_time = captured_time + TimeDelta::Millis(30); + frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40); + // Frame decoded + frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50); + frame_stats.used_decoder = + Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); + // Frame rendered + frame_stats.rendered_time = captured_time + TimeDelta::Millis(60); + frame_stats.rendered_frame_width = 200; + frame_stats.rendered_frame_height = 100; + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/frame, + /*rendered=*/frame, FrameComparisonType::kRegular, + frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + EXPECT_GE(GetFirstOrDie(stats.psnr), 20); + EXPECT_GE(GetFirstOrDie(stats.ssim), 0.5); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.transport_time_ms), 20.0); + EXPECT_GE(GetFirstOrDie(stats.total_delay_incl_transport_ms), 60.0); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + EXPECT_GE(GetFirstOrDie(stats.decode_time_ms), 10.0); + EXPECT_GE(GetFirstOrDie(stats.receive_to_render_time_ms), 30.0); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + EXPECT_GE(GetFirstOrDie(stats.resolution_of_rendered_frame), 200 * 100.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 1); + EXPECT_EQ(stats.num_recv_key_frames, 1); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 0}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_EQ(stats.decoders, + std::vector{*frame_stats.used_decoder}); +} +// Regular frame end +// Stats validation tests end. 
+ } // namespace } // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h index 04f653c02b..033fa7514f 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h @@ -68,6 +68,8 @@ struct FrameStats { absl::optional<StreamCodecInfo> used_encoder = absl::nullopt; // Can be not set if frame was dropped in the network. absl::optional<StreamCodecInfo> used_decoder = absl::nullopt; + + bool decoder_failed = false; }; // Describes why comparison was done in overloaded mode (without calculating diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc index 732ec217b8..79b9286e2d 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc @@ -43,6 +43,13 @@ rtc::StringBuilder& operator<<(rtc::StringBuilder& sb, return sb << state.ToString(); } +bool operator==(const StreamCodecInfo& a, const StreamCodecInfo& b) { + return a.codec_name == b.codec_name && a.first_frame_id == b.first_frame_id && + a.last_frame_id == b.last_frame_id && + a.switched_on_at == b.switched_on_at && + a.switched_from_at == b.switched_from_at; +} + std::string ToString(FrameDropPhase phase) { switch (phase) { case FrameDropPhase::kBeforeEncoder: @@ -51,6 +58,8 @@ std::string ToString(FrameDropPhase phase) { return "kByEncoder"; case FrameDropPhase::kTransport: return "kTransport"; + case FrameDropPhase::kByDecoder: + return "kByDecoder"; case FrameDropPhase::kAfterDecoder: return "kAfterDecoder"; case FrameDropPhase::kLastValue: diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h index 3b2897bf9d..a52914e6b7 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h @@ -64,6 +64,9 @@ struct FrameCounters { // Count of frames that were dropped in any point between capturing and // rendering. int64_t dropped = 0; + // Count of frames for which the decoder returned an error when they were + // sent for decoding. + int64_t failed_to_decode = 0; }; // Contains information about the codec that was used for encoding or decoding @@ -86,6 +89,7 @@ struct StreamCodecInfo { std::ostream& operator<<(std::ostream& os, const StreamCodecInfo& state); rtc::StringBuilder& operator<<(rtc::StringBuilder& sb, const StreamCodecInfo& state); +bool operator==(const StreamCodecInfo& a, const StreamCodecInfo& b); // Represents phases where video frame can be dropped and such drop will be // detected by analyzer. @@ -93,6 +97,7 @@ enum class FrameDropPhase : int { kBeforeEncoder, kByEncoder, kTransport, + kByDecoder, kAfterDecoder, // kLastValue must be the last value in this enumeration.
kLastValue diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc index 41b8aec8a1..b958f4d027 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc @@ -100,13 +100,15 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image, delegate_->Decode(*origin_image, missing_frames, render_time_ms); if (result != WEBRTC_VIDEO_CODEC_OK) { // If delegate decoder failed, then cleanup data for this image. + VideoQualityAnalyzerInterface::DecoderStats stats; { MutexLock lock(&mutex_); timestamp_to_frame_id_.erase(input_image.Timestamp()); decoding_images_.erase(input_image.Timestamp()); + stats.decoder_name = codec_name_; } - analyzer_->OnDecoderError(peer_name_, - out.id.value_or(VideoFrame::kNotSetId), result); + analyzer_->OnDecoderError( + peer_name_, out.id.value_or(VideoFrame::kNotSetId), result, stats); } return result; }
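The new OnDecoderError() overload above keeps the old three-argument virtual in place until downstream analyzers are migrated (see the TODO in video_quality_analyzer_interface.h). As a purely illustrative sketch of that migration path: a hypothetical downstream analyzer (MyVideoQualityAnalyzer below is not part of this change) could override both overloads and forward the old one to the new one. The only DecoderStats field assumed here is decoder_name, which this change populates in QualityAnalyzingVideoDecoder; the remaining interface methods are omitted.

#include <cstdint>

#include "absl/strings/string_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "rtc_base/logging.h"

// Hypothetical downstream analyzer; only the decoder-error hooks are shown,
// all other VideoQualityAnalyzerInterface methods are left out for brevity.
class MyVideoQualityAnalyzer : public webrtc::VideoQualityAnalyzerInterface {
 public:
  // New overload introduced by this change: also receives decoder stats.
  void OnDecoderError(absl::string_view peer_name,
                      uint16_t frame_id,
                      int32_t error_code,
                      const DecoderStats& stats) override {
    RTC_LOG(LS_ERROR) << "[" << peer_name << "] decoder " << stats.decoder_name
                      << " failed on frame " << frame_id << " with code "
                      << error_code;
  }

  // Deprecated overload, still virtual during the migration window; forward
  // it to the new overload with default-constructed stats so both entry
  // points behave identically.
  void OnDecoderError(absl::string_view peer_name,
                      uint16_t frame_id,
                      int32_t error_code) override {
    OnDecoderError(peer_name, frame_id, error_code, DecoderStats());
  }
};

Keeping the old overload as a thin forwarder means analyzer behavior does not depend on whether the calling decoder wrapper has already been updated to pass DecoderStats.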