Add a histogram for the percentage of sent frames that are limited in resolution due to quality:

- "WebRTC.Video.QualityLimitedResolutionInPercent"

and, for downscaled frames, the average number of times the frame is downscaled:
- "WebRTC.Video.QualityLimitedResolutionDownscales"

BUG=

Review URL: https://codereview.webrtc.org/1325153009

Cr-Commit-Position: refs/heads/master@{#10319}
Author:    asapersson
Date:      2015-10-19 00:35:21 -07:00
Committed: Commit bot
Commit:    4306fc70d7
Parent:    a20de2030f

6 changed files with 48 additions and 9 deletions
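As context for the changes below: the encoder reports a per-frame value (quality_resolution_downscales) that is -1 when no information is available, 0 when the frame was sent at full resolution, and N > 0 when the frame was downscaled N times due to quality. The following self-contained sketch illustrates how the two histogram values relate to those per-frame reports; it is plain C++, not the SampleCounter/BoolSampleCounter API used in SendStatisticsProxy, and it omits the kMinRequiredSamples gating applied there.

#include <cstdio>
#include <vector>

struct Stats {
  int percent_limited;     // Mirrors WebRTC.Video.QualityLimitedResolutionInPercent.
  int average_downscales;  // Mirrors WebRTC.Video.QualityLimitedResolutionDownscales.
};

// Aggregates per-frame quality_resolution_downscales values
// (-1 = unknown, 0 = full resolution, N > 0 = downscaled N times).
Stats Aggregate(const std::vector<int>& downscales_per_frame) {
  int frames = 0, limited = 0, sum = 0, samples = 0;
  for (int d : downscales_per_frame) {
    if (d == -1)
      continue;  // Frames without the information are ignored.
    ++frames;
    if (d > 0) {
      ++limited;
      sum += d;
      ++samples;
    }
  }
  Stats s = {-1, -1};
  if (frames > 0)
    s.percent_limited = (limited * 100 + frames / 2) / frames;
  if (samples > 0)
    s.average_downscales = (sum + samples / 2) / samples;
  return s;
}

int main() {
  // Five frames: two at full resolution, two downscaled once, one twice.
  Stats s = Aggregate({0, 0, 1, 1, 2});
  std::printf("limited: %d%%, avg downscales: %d\n", s.percent_limited,
              s.average_downscales);  // Prints "limited: 60%, avg downscales: 1".
  return 0;
}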


@@ -148,7 +148,8 @@ VP8EncoderImpl::VP8EncoderImpl()
       down_scale_bitrate_(0),
       tl0_frame_dropper_(),
       tl1_frame_dropper_(kTl1MaxTimeToDropFrames),
-      key_frame_request_(kMaxSimulcastStreams, false) {
+      key_frame_request_(kMaxSimulcastStreams, false),
+      quality_scaler_enabled_(false) {
   uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
   srand(seed);
@@ -586,6 +587,12 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
                        kDisabledBadQpThreshold, false);
   quality_scaler_.ReportFramerate(codec_.maxFramerate);
+  // Only apply scaling to improve for single-layer streams. The scaling metrics
+  // use frame drops as a signal and is only applicable when we drop frames.
+  quality_scaler_enabled_ = encoders_.size() == 1 &&
+                            configurations_[0].rc_dropframe_thresh > 0 &&
+                            codec_.codecSpecific.VP8.automaticResizeOn;
   return InitAndSetControlSettings();
 }
@@ -709,17 +716,12 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
   if (encoded_complete_callback_ == NULL)
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
-  // Only apply scaling to improve for single-layer streams. The scaling metrics
-  // use frame drops as a signal and is only applicable when we drop frames.
-  const bool use_quality_scaler = encoders_.size() == 1 &&
-                                  configurations_[0].rc_dropframe_thresh > 0 &&
-                                  codec_.codecSpecific.VP8.automaticResizeOn;
-  if (use_quality_scaler)
+  if (quality_scaler_enabled_)
     quality_scaler_.OnEncodeFrame(frame);
   const VideoFrame& input_image =
-      use_quality_scaler ? quality_scaler_.GetScaledFrame(frame) : frame;
+      quality_scaler_enabled_ ? quality_scaler_.GetScaledFrame(frame) : frame;
-  if (use_quality_scaler && (input_image.width() != codec_.width ||
+  if (quality_scaler_enabled_ && (input_image.width() != codec_.width ||
                                  input_image.height() != codec_.height)) {
     int ret = UpdateCodecFrameSize(input_image);
     if (ret < 0)
@@ -1013,6 +1015,9 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
             codec_.simulcastStream[stream_idx].height;
         encoded_images_[encoder_idx]._encodedWidth =
             codec_.simulcastStream[stream_idx].width;
+        encoded_images_[encoder_idx]
+            .adapt_reason_.quality_resolution_downscales =
+            quality_scaler_enabled_ ? quality_scaler_.downscale_shift() : -1;
         encoded_complete_callback_->Encoded(encoded_images_[encoder_idx],
                                             &codec_specific, &frag_info);
       } else if (codec_.mode == kScreensharing) {


@@ -114,6 +114,7 @@ class VP8EncoderImpl : public VP8Encoder {
   std::vector<vpx_codec_enc_cfg_t> configurations_;
   std::vector<vpx_rational_t> downsampling_factors_;
   QualityScaler quality_scaler_;
+  bool quality_scaler_enabled_;
 };  // end of VP8EncoderImpl class
 
 class VP8DecoderImpl : public VP8Decoder {


@@ -37,6 +37,7 @@ class QualityScaler {
   Resolution GetScaledResolution() const;
   const VideoFrame& GetScaledFrame(const VideoFrame& frame);
   int GetTargetFramerate() const;
+  int downscale_shift() const { return downscale_shift_; }
 
  private:
   void AdjustScale(bool up);
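The new downscale_shift() accessor is what VP8EncoderImpl forwards as quality_resolution_downscales. Assuming each downscale step halves both dimensions (a right shift per step, as the accessor name suggests), a hypothetical helper like the one below shows how a shift value maps to an output resolution; ApplyDownscaleShift is illustrative and not part of QualityScaler.

#include <cstdio>

struct Resolution {
  int width;
  int height;
};

// Hypothetical helper (not part of QualityScaler): applies the assumed
// semantics of downscale_shift(), i.e. each step halves width and height.
Resolution ApplyDownscaleShift(Resolution in, int downscale_shift) {
  return {in.width >> downscale_shift, in.height >> downscale_shift};
}

int main() {
  Resolution r = ApplyDownscaleShift({1280, 720}, 2);
  std::printf("%dx%d\n", r.width, r.height);  // 320x180 after two downscale steps.
  return 0;
}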


@@ -70,6 +70,17 @@ void SendStatisticsProxy::UpdateHistograms() {
     RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesSentInPermille",
                               key_frames_permille);
   }
+  int quality_limited =
+      quality_limited_frame_counter_.Percent(kMinRequiredSamples);
+  if (quality_limited != -1) {
+    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.QualityLimitedResolutionInPercent",
+                             quality_limited);
+  }
+  int downscales = quality_downscales_counter_.Avg(kMinRequiredSamples);
+  if (downscales != -1) {
+    RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.QualityLimitedResolutionDownscales",
+                              downscales, 20);
+  }
 }
 
 void SendStatisticsProxy::OnOutgoingRate(uint32_t framerate, uint32_t bitrate) {
@@ -170,6 +181,16 @@ void SendStatisticsProxy::OnSendEncodedImage(
   key_frame_counter_.Add(encoded_image._frameType == kKeyFrame);
+  if (encoded_image.adapt_reason_.quality_resolution_downscales != -1) {
+    bool downscaled =
+        encoded_image.adapt_reason_.quality_resolution_downscales > 0;
+    quality_limited_frame_counter_.Add(downscaled);
+    if (downscaled) {
+      quality_downscales_counter_.Add(
+          encoded_image.adapt_reason_.quality_resolution_downscales);
+    }
+  }
   // TODO(asapersson): This is incorrect if simulcast layers are encoded on
   // different threads and there is no guarantee that one frame of all layers
   // are encoded before the next start.


@@ -134,6 +134,8 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
   SampleCounter sent_height_counter_ GUARDED_BY(crit_);
   SampleCounter encode_time_counter_ GUARDED_BY(crit_);
   BoolSampleCounter key_frame_counter_ GUARDED_BY(crit_);
+  BoolSampleCounter quality_limited_frame_counter_ GUARDED_BY(crit_);
+  SampleCounter quality_downscales_counter_ GUARDED_BY(crit_);
 };
 
 }  // namespace webrtc


@@ -178,6 +178,14 @@ class EncodedImage {
   EncodedImage(uint8_t* buffer, size_t length, size_t size)
       : _buffer(buffer), _length(length), _size(size) {}
 
+  struct AdaptReason {
+    AdaptReason()
+        : quality_resolution_downscales(-1) {}
+
+    int quality_resolution_downscales;  // Number of times this frame is down
+                                        // scaled in resolution due to quality.
+                                        // Or -1 if information is not provided.
+  };
   uint32_t _encodedWidth = 0;
   uint32_t _encodedHeight = 0;
   uint32_t _timeStamp = 0;
@@ -190,6 +198,7 @@ class EncodedImage {
   size_t _length;
   size_t _size;
   bool _completeFrame = false;
+  AdaptReason adapt_reason_;
 };
 
 }  // namespace webrtc
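Taken together, the change threads one integer from the encoder to the metrics: VP8EncoderImpl tags each EncodedImage with the scaler's current downscale shift (or -1 when the quality scaler is disabled), and SendStatisticsProxy folds those tags into the two histograms. The sketch below mirrors that flow with simplified stand-in types; TagFrame and Consume are illustrative names, not WebRTC APIs.

#include <cstdio>

// Simplified stand-ins for the interfaces touched by this change.
struct AdaptReason {
  int quality_resolution_downscales = -1;  // -1: information not provided.
};

struct EncodedFrame {
  AdaptReason adapt_reason;
};

// Encoder side: report the scaler's downscale shift, or -1 if the quality
// scaler is disabled (matching the VP8EncoderImpl change above).
void TagFrame(EncodedFrame* frame, bool scaler_enabled, int downscale_shift) {
  frame->adapt_reason.quality_resolution_downscales =
      scaler_enabled ? downscale_shift : -1;
}

// Stats side: skip frames without information, count quality-limited frames,
// and record the downscale count only for frames that were actually limited.
void Consume(const EncodedFrame& frame, int* limited_frames, int* downscale_sum) {
  int d = frame.adapt_reason.quality_resolution_downscales;
  if (d == -1)
    return;
  if (d > 0) {
    ++*limited_frames;
    *downscale_sum += d;
  }
}

int main() {
  EncodedFrame frame;
  TagFrame(&frame, /*scaler_enabled=*/true, /*downscale_shift=*/1);
  int limited = 0, downscale_sum = 0;
  Consume(frame, &limited, &downscale_sum);
  std::printf("limited=%d downscale_sum=%d\n", limited, downscale_sum);  // limited=1 downscale_sum=1
  return 0;
}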