Add method to return frame history from DVQA.

Bug: b/205824594
Change-Id: Ie388aa4b61538cf97e714b1c5d28b01c72739ebd
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/238162
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35364}
Author: Artem Titov
Date: 2021-11-17 11:49:16 +01:00
Committed by: WebRTC LUCI CQ
Commit: 9c14573d4e (parent: 5a9be308e3)
3 changed files with 57 additions and 0 deletions
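For context, a minimal usage sketch of the new accessor (hypothetical caller code, not part of this CL; the include path and the webrtc namespace qualification are assumptions inferred from the diff below):

#include <cstdint>
#include <cstdio>
#include <map>
#include <string>
#include <vector>

// Include path assumed; adjust to wherever DefaultVideoQualityAnalyzer lives.
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"

// Dumps the per-stream capture history exposed by the new GetStreamFrames().
void DumpStreamFrames(const webrtc::DefaultVideoQualityAnalyzer& analyzer) {
  std::map<std::string, std::vector<uint16_t>> history =
      analyzer.GetStreamFrames();
  for (const auto& entry : history) {
    // entry.first is the stream label (e.g. "alice_video"); entry.second is
    // the list of frame ids in the order they were captured.
    std::printf("%s: %zu frames\n", entry.first.c_str(), entry.second.size());
  }
}

The test added at the bottom of this CL exercises the same accessor end to end.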


@@ -237,6 +237,7 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
it->second.erase(frame_id);
}
stream_to_frame_id_history_[stream_index].insert(frame_id);
stream_to_frame_id_full_history_[stream_index].push_back(frame_id);
// If the state has too many frames that are in flight => remove the oldest
// queued frame in order to avoid using too much memory.
@@ -870,6 +871,16 @@ double DefaultVideoQualityAnalyzer::GetCpuUsagePercent() {
return cpu_measurer_.GetCpuUsagePercent();
}
std::map<std::string, std::vector<uint16_t>>
DefaultVideoQualityAnalyzer::GetStreamFrames() const {
MutexLock lock(&mutex_);
std::map<std::string, std::vector<uint16_t>> out;
for (auto entry_it : stream_to_frame_id_full_history_) {
out.insert({streams_.name(entry_it.first), entry_it.second});
}
return out;
}
uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront(size_t peer) {
size_t peer_queue = GetPeerQueueIndex(peer);
size_t alive_frames_queue = GetAliveFramesQueueIndex();


@@ -92,6 +92,10 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
AnalyzerStats GetAnalyzerStats() const;
double GetCpuUsagePercent();
// Returns the mapping from stream label to the history of frames that were
// seen in this stream, in the order in which they were captured.
std::map<std::string, std::vector<uint16_t>> GetStreamFrames() const;
private:
// Represents a current state of video stream.
class StreamState {
@@ -363,6 +367,9 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// still encoding.
std::map<size_t, std::set<uint16_t>> stream_to_frame_id_history_
RTC_GUARDED_BY(mutex_);
// Map from stream index to the list of frame ids as they were seen in the
// stream.
std::map<size_t, std::vector<uint16_t>> stream_to_frame_id_full_history_
RTC_GUARDED_BY(mutex_);
AnalyzerStats analyzer_stats_ RTC_GUARDED_BY(mutex_);
DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer_;


@@ -1425,5 +1425,44 @@ TEST(
}
}
TEST(DefaultVideoQualityAnalyzerTest, GetStreamFrames) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), options);
analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
kAnalyzerMaxThreadsCount);
// The order in which peers captured frames and passed them to the analyzer.
std::vector<std::string> frame_capturers_sequence{
"alice", "alice", "bob", "bob", "bob",
"bob", "bob", "alice", "alice", "alice",
};
std::map<std::string, std::vector<uint16_t>> stream_to_frame_ids;
stream_to_frame_ids.emplace("alice_video", std::vector<uint16_t>{});
stream_to_frame_ids.emplace("bob_video", std::vector<uint16_t>{});
std::vector<VideoFrame> frames;
for (const std::string& sender : frame_capturers_sequence) {
VideoFrame frame = NextFrame(frame_generator.get(), /*timestamp_us=*/1);
uint16_t frame_id =
analyzer.OnFrameCaptured(sender, sender + "_video", frame);
frame.set_id(frame_id);
stream_to_frame_ids.find(sender + "_video")->second.push_back(frame_id);
frames.push_back(frame);
analyzer.OnFramePreEncode(sender, frame);
analyzer.OnFrameEncoded(sender, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
// We don't need to receive frames for stats to be gathered correctly.
analyzer.Stop();
EXPECT_EQ(analyzer.GetStreamFrames(), stream_to_frame_ids);
}
} // namespace
} // namespace webrtc