Revert "googBandwidthLimitedResolution stat is not always set depending on configuration."

This reverts commit 59283e4c66d038a00923736685457f4b53f922fe.

Reason for revert: This CL is preventing rolls into Chromium because it fails to compile with MSVC.

Sample error log:

[13258/43857] CXX obj/third_party/webrtc/video/video/send_statistics_proxy.obj
FAILED: obj/third_party/webrtc/video/video/send_statistics_proxy.obj 
ninja -t msvc -e environment.x64 -- E:\b\c\goma_client/gomacc.exe "e:\b\c\win_toolchain\vs_files\a9e1098bba66d2acccc377d5ee81265910f29272\vc\tools\msvc\14.11.25503\bin\hostx64\x64/cl.exe" /nologo /showIncludes  @obj/third_party/webrtc/video/video/send_statistics_proxy.obj.rsp /c ../../third_party/webrtc/video/send_statistics_proxy.cc /Foobj/third_party/webrtc/video/video/send_statistics_proxy.obj /Fd"obj/third_party/webrtc/video/video_cc.pdb"
../../third_party/webrtc/video/send_statistics_proxy.cc(217): error C2220: warning treated as error - no 'object' file generated
../../third_party/webrtc/video/send_statistics_proxy.cc(217): warning C4267: 'initializing': conversion from 'size_t' to 'int', possible loss of data
../../third_party/webrtc/video/send_statistics_proxy.cc(632): warning C4267: '=': conversion from 'size_t' to 'uint32_t', possible loss of data
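For reference, C4267 is MSVC's warning for an implicit narrowing conversion from size_t to a smaller integer type, and Chromium rolls build WebRTC with warnings treated as errors, so the two warnings above become hard build failures. The snippet below is a minimal, hypothetical illustration of the pattern and of the conventional explicit-cast fix; it is not the actual code at lines 217 and 632 of send_statistics_proxy.cc.

#include <cstdint>
#include <vector>

int main() {
  std::vector<int> streams(3);

  // With MSVC, initializing an int directly from size() triggers
  // C4267 ('initializing': conversion from 'size_t' to 'int'):
  //   int num_streams = streams.size();

  // An explicit cast is the usual fix once the value is known to fit:
  int num_streams = static_cast<int>(streams.size());

  // The '=' variant of C4267 (size_t to uint32_t) follows the same pattern:
  uint32_t num_entries = 0;
  num_entries = static_cast<uint32_t>(streams.size());

  return (num_streams == 3 && num_entries == 3u) ? 0 : 1;
}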


Original change's description:
> googBandwidthLimitedResolution stat is not always set depending on configuration.
> 
> Currently |bw_resolutions_disabled| is set per VP8EncoderImpl instance and reported via
> OnEncodedImage callback.
> 
> Instead move logic to SendStatisticsProxy to determine if resolution is bw limited or not based
> on info that is reported to SendStatisticsProxy::OnEncodedImage.
> 
> Bug: webrtc:8643
> Change-Id: I6c148e3507a0f04a793775b9f84ce54028b64d0f
> Reviewed-on: https://webrtc-review.googlesource.com/31460
> Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
> Reviewed-by: Stefan Holmer <stefan@webrtc.org>
> Commit-Queue: Åsa Persson <asapersson@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#21249}

TBR=brandtr@webrtc.org,asapersson@webrtc.org,stefan@webrtc.org

# Not skipping CQ checks because original CL landed > 1 day ago.

Bug: webrtc:8643
Change-Id: Ib9ef55b8894ea72236a5dc1e9a839adecd401afb
Reviewed-on: https://webrtc-review.googlesource.com/33100
Reviewed-by: Guido Urdaneta <guidou@webrtc.org>
Commit-Queue: Guido Urdaneta <guidou@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21284}
Author: Guido Urdaneta <guidou@webrtc.org>
Date: 2017-12-14 17:01:38 +00:00
Committed by: Commit Bot
Parent: 7284e8a650
Commit: 62e9ebe589
7 changed files with 87 additions and 208 deletions
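The original CL derived googBandwidthLimitedResolution from information already reported to SendStatisticsProxy::OnEncodedImage; this revert restores the previous flow, in which VP8EncoderImpl counts bandwidth-disabled simulcast streams per frame and reports the count through EncodedImage::AdaptReason. The following is a reduced, self-contained sketch of that restored flow, condensed from the hunks below; the struct and free functions are simplified stand-ins, not the actual WebRTC interfaces.

#include <cstddef>
#include <vector>

// Simplified stand-in for the EncodedImage::AdaptReason struct this revert
// restores (see the first hunk below).
struct EncodedImage {
  struct AdaptReason {
    // Number of resolutions not sent due to bandwidth for this frame,
    // or -1 if the information is not provided.
    int bw_resolutions_disabled = -1;
  };
  AdaptReason adapt_reason_;
};

// Encoder side (cf. VP8EncoderImpl::GetEncodedPartitions): count the
// simulcast streams that are currently switched off.
int NumStreamsDisabled(const std::vector<bool>& streams) {
  int num_disabled = 0;
  for (bool stream : streams) {
    if (!stream)
      ++num_disabled;
  }
  return num_disabled;
}

// Attach the count once per frame, on the lowest stream only.
void TagLowestStream(EncodedImage* image,
                     const std::vector<bool>& send_stream,
                     size_t stream_idx) {
  int bw_resolutions_disabled =
      (send_stream.size() > 1) ? NumStreamsDisabled(send_stream) : -1;
  image->adapt_reason_.bw_resolutions_disabled =
      (stream_idx == 0) ? bw_resolutions_disabled : -1;
}

// Stats side (cf. SendStatisticsProxy::OnSendEncodedImage): the stat is set
// straight from the per-frame report, combined with quality downscales.
bool BwLimitedResolution(const EncodedImage& image, int quality_downscales) {
  return image.adapt_reason_.bw_resolutions_disabled > 0 ||
         quality_downscales > 0;
}

int main() {
  std::vector<bool> send_stream = {true, true, false};  // Top stream disabled.
  EncodedImage image;
  TagLowestStream(&image, send_stream, /*stream_idx=*/0);
  return BwLimitedResolution(image, /*quality_downscales=*/0) ? 0 : 1;
}

Because the count is attached only to the lowest simulcast stream, which is always sent, SendStatisticsProxy sees it exactly once per frame and combines it with quality_downscales_ when setting bw_limited_resolution.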


@@ -35,6 +35,14 @@ class EncodedImage {
void SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms);
// TODO(kthelgason): get rid of this struct as it only has a single member
// remaining.
struct AdaptReason {
AdaptReason() : bw_resolutions_disabled(-1) {}
int bw_resolutions_disabled; // Number of resolutions that are not sent
// due to bandwidth for this frame.
// Or -1 if information is not provided.
};
uint32_t _encodedWidth = 0;
uint32_t _encodedHeight = 0;
uint32_t _timeStamp = 0;
@@ -48,6 +56,7 @@ class EncodedImage {
VideoRotation rotation_ = kVideoRotation_0;
VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
bool _completeFrame = false;
AdaptReason adapt_reason_;
int qp_ = -1; // Quantizer value.
// When an application indicates non-zero values here, it is taken as an


@@ -254,6 +254,8 @@ TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
EXPECT_EQ(kWidth, static_cast<int>(encoded_cb_.encoded_frame_._encodedWidth));
EXPECT_EQ(kHeight,
static_cast<int>(encoded_cb_.encoded_frame_._encodedHeight));
EXPECT_EQ(-1, // Disabled for single stream.
encoded_cb_.encoded_frame_.adapt_reason_.bw_resolutions_disabled);
}
// We only test the encoder here, since the decoded frame rotation is set based


@@ -115,6 +115,15 @@ bool ValidSimulcastTemporalLayers(const VideoCodec& codec, int num_streams) {
return true;
}
int NumStreamsDisabled(const std::vector<bool>& streams) {
int num_disabled = 0;
for (bool stream : streams) {
if (!stream)
++num_disabled;
}
return num_disabled;
}
bool GetGfBoostPercentageFromFieldTrialGroup(int* boost_percentage) {
std::string group = webrtc::field_trial::FindFullName(kVp8GfBoostFieldTrial);
if (group.empty())
@@ -865,6 +874,9 @@ void VP8EncoderImpl::PopulateCodecSpecific(
int VP8EncoderImpl::GetEncodedPartitions(
const TemporalLayers::FrameConfig tl_configs[],
const VideoFrame& input_image) {
int bw_resolutions_disabled =
(encoders_.size() > 1) ? NumStreamsDisabled(send_stream_) : -1;
int stream_idx = static_cast<int>(encoders_.size()) - 1;
int result = WEBRTC_VIDEO_CODEC_OK;
for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
@@ -937,6 +949,9 @@ int VP8EncoderImpl::GetEncodedPartitions(
codec_.simulcastStream[stream_idx].height;
encoded_images_[encoder_idx]._encodedWidth =
codec_.simulcastStream[stream_idx].width;
// Report once per frame (lowest stream always sent).
encoded_images_[encoder_idx].adapt_reason_.bw_resolutions_disabled =
(stream_idx == 0) ? bw_resolutions_disabled : -1;
int qp_128 = -1;
vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER,
&qp_128);


@@ -12,7 +12,8 @@
#include <algorithm>
#include <cmath>
#include <utility>
#include <map>
#include <vector>
#include "common_types.h" // NOLINT(build/include)
#include "modules/video_coding/include/video_codec_interface.h"
@@ -169,9 +170,7 @@ SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer(
fec_byte_counter_(clock, nullptr, true),
first_rtcp_stats_time_ms_(-1),
first_rtp_stats_time_ms_(-1),
start_stats_(stats),
num_streams_(0),
num_pixels_highest_stream_(0) {
start_stats_(stats) {
InitializeBitrateCounters(stats);
}
@@ -198,9 +197,7 @@ void SendStatisticsProxy::UmaSamplesContainer::InitializeBitrateCounters(
}
}
void SendStatisticsProxy::UmaSamplesContainer::RemoveOld(
int64_t now_ms,
bool* is_limited_in_resolution) {
void SendStatisticsProxy::UmaSamplesContainer::RemoveOld(int64_t now_ms) {
while (!encoded_frames_.empty()) {
auto it = encoded_frames_.begin();
if (now_ms - it->second.send_ms < kMaxEncodedFrameWindowMs)
@@ -209,33 +206,14 @@ void SendStatisticsProxy::UmaSamplesContainer::RemoveOld(
// Use max per timestamp.
sent_width_counter_.Add(it->second.max_width);
sent_height_counter_.Add(it->second.max_height);
// Check number of encoded streams per timestamp.
if (num_streams_ > it->second.max_simulcast_idx) {
*is_limited_in_resolution = false;
if (num_streams_ > 1) {
int disabled_streams = num_streams_ - 1 - it->second.max_simulcast_idx;
// Can be limited in resolution or framerate.
uint32_t pixels = it->second.max_width * it->second.max_height;
bool bw_limited_resolution =
disabled_streams > 0 && pixels < num_pixels_highest_stream_;
bw_limited_frame_counter_.Add(bw_limited_resolution);
if (bw_limited_resolution) {
bw_resolutions_disabled_counter_.Add(disabled_streams);
*is_limited_in_resolution = true;
}
}
}
encoded_frames_.erase(it);
}
}
bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame(
const EncodedImage& encoded_frame,
size_t simulcast_idx,
bool* is_limited_in_resolution) {
const EncodedImage& encoded_frame) {
int64_t now_ms = clock_->TimeInMilliseconds();
RemoveOld(now_ms, is_limited_in_resolution);
RemoveOld(now_ms);
if (encoded_frames_.size() > kMaxEncodedFrameMapSize) {
encoded_frames_.clear();
}
@@ -243,10 +221,9 @@ bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame(
auto it = encoded_frames_.find(encoded_frame._timeStamp);
if (it == encoded_frames_.end()) {
// First frame with this timestamp.
encoded_frames_.insert(
std::make_pair(encoded_frame._timeStamp,
Frame(now_ms, encoded_frame._encodedWidth,
encoded_frame._encodedHeight, simulcast_idx)));
encoded_frames_.insert(std::make_pair(
encoded_frame._timeStamp, Frame(now_ms, encoded_frame._encodedWidth,
encoded_frame._encodedHeight)));
sent_fps_counter_.Add(1);
return true;
}
@@ -255,8 +232,6 @@ bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame(
std::max(it->second.max_width, encoded_frame._encodedWidth);
it->second.max_height =
std::max(it->second.max_height, encoded_frame._encodedHeight);
it->second.max_simulcast_idx =
std::max(it->second.max_simulcast_idx, simulcast_idx);
return false;
}
@@ -615,7 +590,6 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms(
void SendStatisticsProxy::OnEncoderReconfigured(
const VideoEncoderConfig& config,
const std::vector<VideoStream>& streams,
uint32_t preferred_bitrate_bps) {
rtc::CritScope lock(&crit_);
stats_.preferred_media_bitrate_bps = preferred_bitrate_bps;
@@ -626,10 +600,6 @@ void SendStatisticsProxy::OnEncoderReconfigured(
GetUmaPrefix(config.content_type), stats_, clock_));
content_type_ = config.content_type;
}
uma_container_->encoded_frames_.clear();
uma_container_->num_streams_ = streams.size();
uma_container_->num_pixels_highest_stream_ =
streams.empty() ? 0 : (streams.back().width * streams.back().height);
}
void SendStatisticsProxy::OnEncodedFrameTimeMeasured(
@@ -878,6 +848,23 @@ void SendStatisticsProxy::OnSendEncodedImage(
uma_container_->key_frame_counter_.Add(encoded_image._frameType ==
kVideoFrameKey);
stats_.bw_limited_resolution =
encoded_image.adapt_reason_.bw_resolutions_disabled > 0 ||
quality_downscales_ > 0;
if (quality_downscales_ != -1) {
uma_container_->quality_limited_frame_counter_.Add(quality_downscales_ > 0);
if (quality_downscales_ > 0)
uma_container_->quality_downscales_counter_.Add(quality_downscales_);
}
if (encoded_image.adapt_reason_.bw_resolutions_disabled != -1) {
bool bw_limited = encoded_image.adapt_reason_.bw_resolutions_disabled > 0;
uma_container_->bw_limited_frame_counter_.Add(bw_limited);
if (bw_limited) {
uma_container_->bw_resolutions_disabled_counter_.Add(
encoded_image.adapt_reason_.bw_resolutions_disabled);
}
}
if (encoded_image.qp_ != -1) {
if (!stats_.qp_sum)
@@ -904,23 +891,8 @@ void SendStatisticsProxy::OnSendEncodedImage(
}
media_byte_rate_tracker_.AddSamples(encoded_image._length);
// Initialize to current since |is_limited_in_resolution| is only updated
// when an encoded frame is removed from the EncodedFrameMap.
bool is_limited_in_resolution = stats_.bw_limited_resolution;
if (uma_container_->InsertEncodedFrame(encoded_image, simulcast_idx,
&is_limited_in_resolution)) {
if (uma_container_->InsertEncodedFrame(encoded_image))
encoded_frame_rate_tracker_.AddSamples(1);
}
stats_.bw_limited_resolution =
is_limited_in_resolution || quality_downscales_ > 0;
if (quality_downscales_ != -1) {
uma_container_->quality_limited_frame_counter_.Add(quality_downscales_ > 0);
if (quality_downscales_ > 0)
uma_container_->quality_downscales_counter_.Add(quality_downscales_);
}
}
int SendStatisticsProxy::GetSendFrameRate() const {


@@ -81,7 +81,6 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
// Used to indicate change in content type, which may require a change in
// how stats are collected and set the configured preferred media bitrate.
void OnEncoderReconfigured(const VideoEncoderConfig& encoder_config,
const std::vector<VideoStream>& streams,
uint32_t preferred_bitrate_bps);
// Used to update the encoder target rate.
@@ -191,19 +190,12 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
}
};
struct Frame {
Frame(int64_t send_ms,
uint32_t width,
uint32_t height,
size_t simulcast_idx)
: send_ms(send_ms),
max_width(width),
max_height(height),
max_simulcast_idx(simulcast_idx) {}
Frame(int64_t send_ms, uint32_t width, uint32_t height)
: send_ms(send_ms), max_width(width), max_height(height) {}
const int64_t
send_ms; // Time when first frame with this timestamp is sent.
uint32_t max_width; // Max width with this timestamp.
uint32_t max_height; // Max height with this timestamp.
size_t max_simulcast_idx; // Max simulcast index with this timestamp.
};
typedef std::map<uint32_t, Frame, TimestampOlderThan> EncodedFrameMap;
@@ -255,10 +247,8 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
void InitializeBitrateCounters(const VideoSendStream::Stats& stats);
bool InsertEncodedFrame(const EncodedImage& encoded_frame,
size_t simulcast_idx,
bool* is_limited_in_resolution);
void RemoveOld(int64_t now_ms, bool* is_limited_in_resolution);
bool InsertEncodedFrame(const EncodedImage& encoded_frame);
void RemoveOld(int64_t now_ms);
const std::string uma_prefix_;
Clock* const clock_;
@@ -295,8 +285,6 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
FallbackEncoderInfoDisabled fallback_info_disabled_;
ReportBlockStats report_block_stats_;
const VideoSendStream::Stats start_stats_;
size_t num_streams_; // Number of configured streams to encoder.
uint32_t num_pixels_highest_stream_;
EncodedFrameMap encoded_frames_;
std::map<int, QpCounters>


@@ -28,12 +28,10 @@ const uint32_t kFirstRtxSsrc = 18;
const uint32_t kSecondRtxSsrc = 43;
const uint32_t kFlexFecSsrc = 55;
const int kFpsPeriodicIntervalMs = 2000;
const int kPreferredBps = 50000;
const int kWidth = 640;
const int kHeight = 480;
const int kQpIdx0 = 21;
const int kQpIdx1 = 39;
const int kRtpClockRateHz = 90000;
const CodecSpecificInfo kDefaultCodecInfo = []() {
CodecSpecificInfo codec_info;
codec_info.codecType = kVideoCodecVP8;
@@ -324,11 +322,12 @@ TEST_F(SendStatisticsProxyTest, OnEncodedFrameTimeMeasured) {
TEST_F(SendStatisticsProxyTest, OnEncoderReconfiguredChangePreferredBitrate) {
VideoSendStream::Stats stats = statistics_proxy_->GetStats();
EXPECT_EQ(0, stats.preferred_media_bitrate_bps);
const int kPreferredMediaBitrateBps = 50;
VideoEncoderConfig config;
statistics_proxy_->OnEncoderReconfigured(config, {}, kPreferredBps);
statistics_proxy_->OnEncoderReconfigured(config, kPreferredMediaBitrateBps);
stats = statistics_proxy_->GetStats();
EXPECT_EQ(kPreferredBps, stats.preferred_media_bitrate_bps);
EXPECT_EQ(kPreferredMediaBitrateBps, stats.preferred_media_bitrate_bps);
}
TEST_F(SendStatisticsProxyTest, OnSendEncodedImageIncreasesFramesEncoded) {
@@ -754,7 +753,7 @@ TEST_F(SendStatisticsProxyTest, AdaptChangesReportedAfterContentSwitch) {
// Switch content type, real-time stats should be updated.
VideoEncoderConfig config;
config.content_type = VideoEncoderConfig::ContentType::kScreen;
statistics_proxy_->OnEncoderReconfigured(config, {}, kPreferredBps);
statistics_proxy_->OnEncoderReconfigured(config, 50);
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.AdaptChangesPerMinute.Cpu"));
EXPECT_EQ(1, metrics::NumEvents("WebRTC.Video.AdaptChangesPerMinute.Cpu", 8));
EXPECT_EQ(0,
@@ -787,12 +786,12 @@ TEST_F(SendStatisticsProxyTest, SwitchContentTypeUpdatesHistograms) {
// No switch, stats should not be updated.
VideoEncoderConfig config;
config.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo;
statistics_proxy_->OnEncoderReconfigured(config, {}, kPreferredBps);
statistics_proxy_->OnEncoderReconfigured(config, 50);
EXPECT_EQ(0, metrics::NumSamples("WebRTC.Video.InputWidthInPixels"));
// Switch to screenshare, real-time stats should be updated.
config.content_type = VideoEncoderConfig::ContentType::kScreen;
statistics_proxy_->OnEncoderReconfigured(config, {}, kPreferredBps);
statistics_proxy_->OnEncoderReconfigured(config, 50);
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.InputWidthInPixels"));
}
@@ -1180,30 +1179,11 @@ TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_H264) {
}
TEST_F(SendStatisticsProxyTest,
BandwidthLimitedHistogramsNotUpdatedForOneStream) {
// Configure one stream.
VideoEncoderConfig config;
config.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo;
VideoStream stream1;
stream1.width = kWidth;
stream1.height = kHeight;
statistics_proxy_->OnEncoderReconfigured(config, {stream1}, kPreferredBps);
const int64_t kMaxEncodedFrameWindowMs = 800;
const int kFps = 20;
const int kNumFramesPerWindow = kFps * kMaxEncodedFrameWindowMs / 1000;
const int kMinSamples = // Sample added when removed from EncodedFrameMap.
SendStatisticsProxy::kMinRequiredMetricsSamples + kNumFramesPerWindow;
// Stream encoded.
BandwidthLimitedHistogramsNotUpdatedWhenDisabled) {
EncodedImage encoded_image;
encoded_image._encodedWidth = kWidth;
encoded_image._encodedHeight = kHeight;
for (int i = 0; i < kMinSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
encoded_image._timeStamp += (kRtpClockRateHz / kFps);
// encoded_image.adapt_reason_.bw_resolutions_disabled by default: -1
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
}
// Histograms are updated when the statistics_proxy_ is deleted.
statistics_proxy_.reset();
@@ -1214,37 +1194,12 @@ TEST_F(SendStatisticsProxyTest,
}
TEST_F(SendStatisticsProxyTest,
BandwidthLimitedHistogramsUpdatedForTwoStreams_NoResolutionDisabled) {
// Configure two streams.
VideoEncoderConfig config;
config.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo;
VideoStream stream1;
stream1.width = kWidth / 2;
stream1.height = kHeight / 2;
VideoStream stream2;
stream2.width = kWidth;
stream2.height = kHeight;
statistics_proxy_->OnEncoderReconfigured(config, {stream1, stream2},
kPreferredBps);
const int64_t kMaxEncodedFrameWindowMs = 800;
const int kFps = 20;
const int kNumFramesPerWindow = kFps * kMaxEncodedFrameWindowMs / 1000;
const int kMinSamples = // Sample added when removed from EncodedFrameMap.
SendStatisticsProxy::kMinRequiredMetricsSamples + kNumFramesPerWindow;
// Two streams encoded.
BandwidthLimitedHistogramsUpdatedWhenEnabled_NoResolutionDisabled) {
const int kResolutionsDisabled = 0;
EncodedImage encoded_image;
for (int i = 0; i < kMinSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
encoded_image._timeStamp += (kRtpClockRateHz / kFps);
encoded_image._encodedWidth = kWidth;
encoded_image._encodedHeight = kHeight;
encoded_image.adapt_reason_.bw_resolutions_disabled = kResolutionsDisabled;
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
encoded_image._encodedWidth = kWidth / 2;
encoded_image._encodedHeight = kHeight / 2;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
}
// Histograms are updated when the statistics_proxy_ is deleted.
statistics_proxy_.reset();
@@ -1258,34 +1213,12 @@ TEST_F(SendStatisticsProxyTest,
}
TEST_F(SendStatisticsProxyTest,
BandwidthLimitedHistogramsUpdatedForTwoStreams_OneResolutionDisabled) {
// Configure two streams.
VideoEncoderConfig config;
config.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo;
VideoStream stream1;
stream1.width = kWidth / 2;
stream1.height = kHeight / 2;
VideoStream stream2;
stream2.width = kWidth;
stream2.height = kHeight;
statistics_proxy_->OnEncoderReconfigured(config, {stream1, stream2},
kPreferredBps);
const int64_t kMaxEncodedFrameWindowMs = 800;
const int kFps = 20;
const int kNumFramesPerWindow = kFps * kMaxEncodedFrameWindowMs / 1000;
const int kMinSamples = // Sample added when removed from EncodedFrameMap.
SendStatisticsProxy::kMinRequiredMetricsSamples + kNumFramesPerWindow;
// One stream encoded.
BandwidthLimitedHistogramsUpdatedWhenEnabled_OneResolutionDisabled) {
const int kResolutionsDisabled = 1;
EncodedImage encoded_image;
encoded_image._encodedWidth = kWidth / 2;
encoded_image._encodedHeight = kHeight / 2;
for (int i = 0; i < kMinSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
encoded_image._timeStamp += (kRtpClockRateHz / kFps);
encoded_image.adapt_reason_.bw_resolutions_disabled = kResolutionsDisabled;
for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
}
// Histograms are updated when the statistics_proxy_ is deleted.
statistics_proxy_.reset();
@@ -1293,11 +1226,12 @@ TEST_F(SendStatisticsProxyTest,
"WebRTC.Video.BandwidthLimitedResolutionInPercent"));
EXPECT_EQ(1, metrics::NumEvents(
"WebRTC.Video.BandwidthLimitedResolutionInPercent", 100));
// One resolution disabled.
// Resolutions disabled.
EXPECT_EQ(1, metrics::NumSamples(
"WebRTC.Video.BandwidthLimitedResolutionsDisabled"));
EXPECT_EQ(1, metrics::NumEvents(
"WebRTC.Video.BandwidthLimitedResolutionsDisabled", 1));
EXPECT_EQ(
1, metrics::NumEvents("WebRTC.Video.BandwidthLimitedResolutionsDisabled",
kResolutionsDisabled));
}
TEST_F(SendStatisticsProxyTest,
@@ -1366,58 +1300,17 @@ TEST_F(SendStatisticsProxyTest,
TEST_F(SendStatisticsProxyTest, GetStatsReportsBandwidthLimitedResolution) {
// Initially false.
EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution);
// Configure two streams.
VideoEncoderConfig config;
config.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo;
VideoStream stream1;
stream1.width = kWidth / 2;
stream1.height = kHeight / 2;
VideoStream stream2;
stream2.width = kWidth;
stream2.height = kHeight;
statistics_proxy_->OnEncoderReconfigured(config, {stream1, stream2},
kPreferredBps);
const int64_t kMaxEncodedFrameWindowMs = 800;
const int kFps = 20;
const int kMinSamples = // Sample added when removed from EncodedFrameMap.
kFps * kMaxEncodedFrameWindowMs / 1000;
// One stream encoded.
// No resolution scale by default.
EncodedImage encoded_image;
encoded_image._encodedWidth = kWidth / 2;
encoded_image._encodedHeight = kHeight / 2;
for (int i = 0; i < kMinSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
encoded_image._timeStamp += (kRtpClockRateHz / kFps);
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution);
}
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution);
// First frame removed from EncodedFrameMap, stats updated.
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
++encoded_image._timeStamp;
// Simulcast disabled resolutions
encoded_image.adapt_reason_.bw_resolutions_disabled = 1;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_resolution);
// Two streams encoded.
for (int i = 0; i < kMinSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
encoded_image._timeStamp += (kRtpClockRateHz / kFps);
encoded_image._encodedWidth = kWidth;
encoded_image._encodedHeight = kHeight;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_resolution);
encoded_image._encodedWidth = kWidth / 2;
encoded_image._encodedHeight = kHeight / 2;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_resolution);
}
// First frame with two streams removed, expect no resolution limit.
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
encoded_image._timeStamp += (kRtpClockRateHz / kFps);
encoded_image.adapt_reason_.bw_resolutions_disabled = 0;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution);
@@ -1580,7 +1473,7 @@ TEST_F(SendStatisticsProxyTest, ResetsRtcpCountersOnContentChange) {
// Changing content type causes histograms to be reported.
VideoEncoderConfig config;
config.content_type = VideoEncoderConfig::ContentType::kScreen;
statistics_proxy_->OnEncoderReconfigured(config, {}, kPreferredBps);
statistics_proxy_->OnEncoderReconfigured(config, 50);
EXPECT_EQ(1,
metrics::NumSamples("WebRTC.Video.NackPacketsReceivedPerMinute"));
@@ -1744,7 +1637,7 @@ TEST_F(SendStatisticsProxyTest, ResetsRtpCountersOnContentChange) {
// Changing content type causes histograms to be reported.
VideoEncoderConfig config;
config.content_type = VideoEncoderConfig::ContentType::kScreen;
statistics_proxy_->OnEncoderReconfigured(config, {}, kPreferredBps);
statistics_proxy_->OnEncoderReconfigured(config, 50000);
// Interval: 3500 bytes * 4 / 2 sec = 7000 bytes / sec = 56 kbps
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.BitrateSentInKbps"));


@@ -616,7 +616,7 @@ void VideoStreamEncoder::ReconfigureEncoder() {
if (current_framerate == 0)
current_framerate = codec.maxFramerate;
stats_proxy_->OnEncoderReconfigured(
encoder_config_, streams,
encoder_config_,
rate_allocator_.get()
? rate_allocator_->GetPreferredBitrateBps(current_framerate)
: codec.maxBitrate);