Ignore frames that are coming to DVQA after Stop is called

Bug: webrtc:12247
Change-Id: Ie3e773bdff66c900956019ac3131bbdb9ee874cd
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/196084
Reviewed-by: Sebastian Jansson <srte@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Commit-Queue: Andrey Logvin <landrey@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#32738}
Andrey Logvin
2020-12-01 19:32:53 +00:00
committed by Commit Bot
parent 1afe2be9a9
commit 8d4cdd11d8
8 changed files with 155 additions and 23 deletions
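For orientation, here is a minimal caller-side sketch of the new contract (it mirrors the AnalyzingFramePreprocessor change at the end of this diff; the variable names are illustrative, not from the patch): OnFrameCaptured() now returns absl::nullopt for frames captured after Stop(), so a caller should only tag the frame when a value is present.

    // Hypothetical caller; |analyzer| implements VideoQualityAnalyzerInterface.
    absl::optional<uint16_t> frame_id =
        analyzer->OnFrameCaptured(peer_name, stream_label, frame);
    if (frame_id.has_value()) {
      // Frame was accepted (analyzer not yet stopped): tag it for tracking.
      frame.set_id(frame_id.value());
    }
    // Otherwise the frame keeps its default id and is not tracked.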

View File

@@ -85,9 +85,11 @@ class VideoQualityAnalyzerInterface : public StatsObserverInterface {
// Will be called when frame was generated from the input stream.
// |peer_name| is name of the peer on which side frame was captured.
// Returns frame id, that will be set by framework to the frame.
virtual uint16_t OnFrameCaptured(absl::string_view peer_name,
const std::string& stream_label,
const VideoFrame& frame) = 0;
// absl::nullopt is returned if analyzer has ignored the frame.
virtual absl::optional<uint16_t> OnFrameCaptured(
absl::string_view peer_name,
const std::string& stream_label,
const VideoFrame& frame) = 0;
// Will be called before calling the encoder.
// |peer_name| is name of the peer on which side frame came to encoder.
virtual void OnFramePreEncode(absl::string_view peer_name,

View File

@@ -207,6 +207,7 @@ if (!build_with_chromium) {
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -594,6 +595,7 @@ if (!build_with_chromium) {
"../../../rtc_base:logging",
"../../../rtc_base/synchronization:mutex",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_quality_metrics_reporter") {
@@ -653,6 +655,7 @@ if (!build_with_chromium) {
"../../../rtc_tools:video_quality_analysis",
"../../../system_wrappers",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("network_quality_metrics_reporter") {

View File

@@ -14,6 +14,7 @@
#include <memory>
#include <utility>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/numerics/samples_stats_counter.h"
#include "api/units/time_delta.h"
@@ -159,10 +160,16 @@ void DefaultVideoQualityAnalyzer::Start(
StartMeasuringCpuProcessTime();
}
uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
absl::optional<uint16_t> DefaultVideoQualityAnalyzer::OnFrameCaptured(
absl::string_view peer_name,
const std::string& stream_label,
const webrtc::VideoFrame& frame) {
{
MutexLock lock(&lock_);
if (state_ == State::kStopped) {
return absl::nullopt;
}
}
// |next_frame_id| is atomic, so we needn't lock here.
uint16_t frame_id = next_frame_id_++;
Timestamp start_time = Timestamp::MinusInfinity();
@@ -280,6 +287,9 @@ void DefaultVideoQualityAnalyzer::OnFramePreEncode(
absl::string_view peer_name,
const webrtc::VideoFrame& frame) {
MutexLock lock(&lock_);
if (state_ == State::kStopped) {
return;
}
auto it = captured_frames_in_flight_.find(frame.id());
RTC_DCHECK(it != captured_frames_in_flight_.end())
<< "Frame id=" << frame.id() << " not found";
@@ -300,6 +310,9 @@ void DefaultVideoQualityAnalyzer::OnFrameEncoded(
const webrtc::EncodedImage& encoded_image,
const EncoderStats& stats) {
MutexLock lock(&lock_);
if (state_ == State::kStopped) {
return;
}
auto it = captured_frames_in_flight_.find(frame_id);
RTC_DCHECK(it != captured_frames_in_flight_.end());
// For SVC we can receive multiple encoded images for one frame, so to cover
@@ -330,6 +343,9 @@ void DefaultVideoQualityAnalyzer::OnFramePreDecode(
uint16_t frame_id,
const webrtc::EncodedImage& input_image) {
MutexLock lock(&lock_);
if (state_ == State::kStopped) {
return;
}
size_t peer_index = peers_->index(peer_name);
auto it = captured_frames_in_flight_.find(frame_id);
@@ -367,6 +383,9 @@ void DefaultVideoQualityAnalyzer::OnFrameDecoded(
const webrtc::VideoFrame& frame,
const DecoderStats& stats) {
MutexLock lock(&lock_);
if (state_ == State::kStopped) {
return;
}
size_t peer_index = peers_->index(peer_name);
auto it = captured_frames_in_flight_.find(frame.id());
@@ -390,6 +409,9 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
absl::string_view peer_name,
const webrtc::VideoFrame& frame) {
MutexLock lock(&lock_);
if (state_ == State::kStopped) {
return;
}
size_t peer_index = peers_->index(peer_name);
auto frame_it = captured_frames_in_flight_.find(frame.id());
@@ -484,6 +506,12 @@ void DefaultVideoQualityAnalyzer::OnEncoderError(
absl::string_view peer_name,
const webrtc::VideoFrame& frame,
int32_t error_code) {
{
MutexLock lock(&lock_);
if (state_ == State::kStopped) {
return;
}
}
RTC_LOG(LS_ERROR) << "Encoder error for frame.id=" << frame.id()
<< ", code=" << error_code;
}
@@ -491,6 +519,12 @@ void DefaultVideoQualityAnalyzer::OnEncoderError(
void DefaultVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name,
uint16_t frame_id,
int32_t error_code) {
{
MutexLock lock(&lock_);
if (state_ == State::kStopped) {
return;
}
}
RTC_LOG(LS_ERROR) << "Decoder error for frame_id=" << frame_id
<< ", code=" << error_code;
}
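A note on the pattern above: in OnEncoderError() and OnDecoderError() the extra braces scope the MutexLock to the state check only, so the error is logged without holding |lock_|. A condensed sketch of that guard, using only names that appear in the hunks above:

    {
      MutexLock lock(&lock_);
      if (state_ == State::kStopped) {
        return;  // Analyzer already stopped; drop the late callback silently.
      }
    }  // |lock_| is released here, so the logging below runs outside the lock.
    RTC_LOG(LS_ERROR) << "...";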

View File

@@ -19,6 +19,7 @@
#include <string>
#include <vector>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/numerics/samples_stats_counter.h"
#include "api/test/video_quality_analyzer_interface.h"
@@ -196,9 +197,9 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
void Start(std::string test_case_name,
rtc::ArrayView<const std::string> peer_names,
int max_threads_count) override;
uint16_t OnFrameCaptured(absl::string_view peer_name,
const std::string& stream_label,
const VideoFrame& frame) override;
absl::optional<uint16_t> OnFrameCaptured(absl::string_view peer_name,
const std::string& stream_label,
const VideoFrame& frame) override;
void OnFramePreEncode(absl::string_view peer_name,
const VideoFrame& frame) override;
void OnFrameEncoded(absl::string_view peer_name,

View File

@@ -116,7 +116,7 @@ TEST(DefaultVideoQualityAnalyzerTest,
for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame).value());
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kSenderPeerName, frame);
@@ -168,7 +168,7 @@ TEST(DefaultVideoQualityAnalyzerTest,
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame).value());
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kSenderPeerName, frame);
@@ -196,7 +196,7 @@ TEST(DefaultVideoQualityAnalyzerTest,
for (int i = 0; i < 2 * kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame).value());
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kSenderPeerName, frame);
@@ -248,7 +248,7 @@ TEST(DefaultVideoQualityAnalyzerTest,
for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame).value());
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kSenderPeerName, frame);
@@ -298,7 +298,7 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame).value());
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kSenderPeerName, frame);
@@ -353,7 +353,8 @@ TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwice) {
VideoFrame captured_frame = NextFrame(frame_generator.get(), 0);
captured_frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, captured_frame));
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, captured_frame)
.value());
analyzer.OnFramePreEncode(kSenderPeerName, captured_frame);
analyzer.OnFrameEncoded(kSenderPeerName, captured_frame.id(),
FakeEncode(captured_frame),
@@ -410,7 +411,7 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario2Receivers) {
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(analyzer.OnFrameCaptured(kAlice, kStreamLabel, frame));
frame.set_id(analyzer.OnFrameCaptured(kAlice, kStreamLabel, frame).value());
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kAlice, frame);
@@ -526,7 +527,7 @@ TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwiceWith2Receivers) {
VideoFrame captured_frame = NextFrame(frame_generator.get(), 0);
captured_frame.set_id(
analyzer.OnFrameCaptured(kAlice, kStreamLabel, captured_frame));
analyzer.OnFrameCaptured(kAlice, kStreamLabel, captured_frame).value());
analyzer.OnFramePreEncode(kAlice, captured_frame);
analyzer.OnFrameEncoded(kAlice, captured_frame.id(),
FakeEncode(captured_frame),
@@ -564,6 +565,92 @@ TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwiceWith2Receivers) {
EXPECT_EQ(frame_counters.dropped, 0);
}
TEST(DefaultVideoQualityAnalyzerTest, FramesComingAfterStop) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
AnalyzerOptionsForTest());
analyzer.Start("test_case",
std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame).value());
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kSenderPeerName, frame);
analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
for (size_t i = 1; i < frames_order.size(); i += 2) {
uint16_t frame_id = frames_order.at(i);
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
FakeEncode(received_frame));
analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
}
// Give the analyzer some time to process frames on the async thread. The
// computations have to be fast (heavy metrics are disabled!), so if they don't
// fit into 100ms it means we have an issue!
SleepMs(100);
analyzer.Stop();
captured_frames.clear();
frames_order.clear();
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
ASSERT_FALSE(analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame)
.has_value());
frame.set_id(i + 1);
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kSenderPeerName, frame);
analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
for (size_t i = 1; i < frames_order.size(); i += 2) {
uint16_t frame_id = frames_order.at(i);
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
FakeEncode(received_frame));
analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
}
SleepMs(100);
// We expect the results to be the same as in NormalScenario. All frames after
// Stop() should be ignored.
AnalyzerStats stats = analyzer.GetAnalyzerStats();
EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream);
std::vector<StatsSample> frames_in_flight_sizes =
GetSortedSamples(stats.frames_in_flight_left_count);
EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
<< ToString(frames_in_flight_sizes);
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream);
EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream / 2);
EXPECT_EQ(frame_counters.decoded, kMaxFramesInFlightPerStream / 2);
EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream / 2);
EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream / 2);
}
TEST(DefaultVideoQualityAnalyzerTest, HeavyQualityMetricsFromEqualFrames) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
@@ -584,7 +671,7 @@ TEST(DefaultVideoQualityAnalyzerTest, HeavyQualityMetricsFromEqualFrames) {
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame).value());
analyzer.OnFramePreEncode(kSenderPeerName, frame);
analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
@@ -643,7 +730,7 @@ TEST(DefaultVideoQualityAnalyzerTest,
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame).value());
analyzer.OnFramePreEncode(kSenderPeerName, frame);
analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());

View File

@@ -10,6 +10,7 @@
#include "test/pc/e2e/analyzer/video/example_video_quality_analyzer.h"
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "rtc_base/logging.h"
@@ -24,7 +25,7 @@ void ExampleVideoQualityAnalyzer::Start(
rtc::ArrayView<const std::string> peer_names,
int max_threads_count) {}
uint16_t ExampleVideoQualityAnalyzer::OnFrameCaptured(
absl::optional<uint16_t> ExampleVideoQualityAnalyzer::OnFrameCaptured(
absl::string_view peer_name,
const std::string& stream_label,
const webrtc::VideoFrame& frame) {

View File

@@ -16,6 +16,7 @@
#include <set>
#include <string>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/video/encoded_image.h"
@@ -37,9 +38,9 @@ class ExampleVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
void Start(std::string test_case_name,
rtc::ArrayView<const std::string> peer_names,
int max_threads_count) override;
uint16_t OnFrameCaptured(absl::string_view peer_name,
const std::string& stream_label,
const VideoFrame& frame) override;
absl::optional<uint16_t> OnFrameCaptured(absl::string_view peer_name,
const std::string& stream_label,
const VideoFrame& frame) override;
void OnFramePreEncode(absl::string_view peer_name,
const VideoFrame& frame) override;
void OnFrameEncoded(absl::string_view peer_name,

View File

@@ -15,6 +15,7 @@
#include "absl/memory/memory.h"
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h"
#include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h"
@@ -58,9 +59,11 @@ class AnalyzingFramePreprocessor
VideoFrame Preprocess(const VideoFrame& source_frame) override {
// Copy VideoFrame to be able to set id on it.
VideoFrame frame = source_frame;
uint16_t frame_id =
absl::optional<uint16_t> frame_id =
analyzer_->OnFrameCaptured(peer_name_, stream_label_, frame);
frame.set_id(frame_id);
if (frame_id.has_value()) {
frame.set_id(frame_id.value());
}
for (auto& sink : sinks_) {
sink->OnFrame(frame);