Add fine-grained dropped video frame counters on the sending side

Four new counters are added to SendStatisticsProxy and reported to UMA and logs.

Bug: webrtc:8355
Change-Id: Idf9b8dfc295c92821e058a97cb3894dc6a446082
Reviewed-on: https://webrtc-review.googlesource.com/12260
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Reviewed-by: Taylor Brandstetter <deadbeef@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20347}
Author: Ilya Nikolaevskiy
Date: 2017-10-19 11:36:02 +02:00
Committed by: Commit Bot
Commit: 4b1a363e4c (parent: a8c08b1063)

16 changed files with 174 additions and 28 deletions

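For orientation, the four counters track drops at successive stages of the send pipeline: the source/capturer (frame discarded by the VideoAdapter), the encoder queue (encoder blocked), the media-optimizations rate limiter, and the encoder itself. A minimal sketch of reading them from a stream's stats; the send_stream pointer and the aggregation are illustrative, while GetStats() and the field names come from this change:

    // Illustrative only: assumes an already-created webrtc::VideoSendStream*.
    webrtc::VideoSendStream::Stats stats = send_stream->GetStats();
    uint32_t total_dropped = stats.frames_dropped_by_capturer +
                             stats.frames_dropped_by_encoder_queue +
                             stats.frames_dropped_by_rate_limiter +
                             stats.frames_dropped_by_encoder;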

@@ -56,13 +56,25 @@ class EncodedImageCallback {
     bool drop_next_frame = false;
   };

+  // Used to signal the reason a frame was dropped.
+  // kDroppedByMediaOptimizations - dropped by MediaOptimizations (for rate
+  // limiting purposes).
+  // kDroppedByEncoder - dropped by the encoder's internal rate limiter.
+  enum class DropReason : uint8_t {
+    kDroppedByMediaOptimizations,
+    kDroppedByEncoder
+  };
+
   // Callback function which is called when an image has been encoded.
   virtual Result OnEncodedImage(
       const EncodedImage& encoded_image,
       const CodecSpecificInfo* codec_specific_info,
       const RTPFragmentationHeader* fragmentation) = 0;

+  // Deprecated. TODO(ilnik): Remove this in a few weeks.
   virtual void OnDroppedFrame() {}
+  virtual void OnDroppedFrame(DropReason reason) {}
 };

 class VideoEncoder {

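A minimal sketch of a consumer implementing the new overload; DropCountingCallback and its counters are hypothetical, only DropReason and the OnDroppedFrame(DropReason) signature come from the hunk above:

    class DropCountingCallback : public webrtc::EncodedImageCallback {
     public:
      Result OnEncodedImage(const webrtc::EncodedImage& encoded_image,
                            const webrtc::CodecSpecificInfo* codec_specific_info,
                            const webrtc::RTPFragmentationHeader* fragmentation)
          override {
        return Result(Result::OK);  // Accept every encoded frame.
      }
      void OnDroppedFrame(DropReason reason) override {
        // Tally drops by origin, much like SendStatisticsProxy does below.
        if (reason == DropReason::kDroppedByEncoder)
          ++encoder_drops_;
        else
          ++rate_limiter_drops_;
      }

     private:
      int encoder_drops_ = 0;
      int rate_limiter_drops_ = 0;
    };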

@@ -63,6 +63,10 @@ class VideoSendStream {
     int avg_encode_time_ms = 0;
     int encode_usage_percent = 0;
     uint32_t frames_encoded = 0;
+    uint32_t frames_dropped_by_capturer = 0;
+    uint32_t frames_dropped_by_encoder_queue = 0;
+    uint32_t frames_dropped_by_rate_limiter = 0;
+    uint32_t frames_dropped_by_encoder = 0;
     rtc::Optional<uint64_t> qp_sum;
     // Bitrate the encoder is currently configured to use due to bandwidth
     // limitations.


@@ -105,6 +105,7 @@ bool AdaptedVideoTrackSource::AdaptFrame(int width,
   if (!video_adapter_.AdaptFrameResolution(
           width, height, time_us * rtc::kNumNanosecsPerMicrosec,
           crop_width, crop_height, out_width, out_height)) {
+    broadcaster_.OnDiscardedFrame();
     // VideoAdapter dropped the frame.
     return false;
   }


@@ -73,6 +73,12 @@ void VideoBroadcaster::OnFrame(const webrtc::VideoFrame& frame) {
   }
 }

+void VideoBroadcaster::OnDiscardedFrame() {
+  for (auto& sink_pair : sink_pairs()) {
+    sink_pair.sink->OnDiscardedFrame();
+  }
+}
+
 void VideoBroadcaster::UpdateWants() {
   RTC_DCHECK(thread_checker_.CalledOnValidThread());


@@ -50,6 +50,8 @@ class VideoBroadcaster : public VideoSourceBase,
   // to the VideoSinkWants.
   void OnFrame(const webrtc::VideoFrame& frame) override;

+  void OnDiscardedFrame() override;
+
  protected:
   void UpdateWants() RTC_EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
   const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& GetBlackFrameBuffer(

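Taken together with the previous hunk, the broadcaster now fans a discard notification out to every registered sink. An illustrative use, assuming a hypothetical sink type such as the FrameCounterSink sketched after the VideoSinkInterface change below:

    rtc::VideoBroadcaster broadcaster;
    FrameCounterSink sink_a, sink_b;  // Hypothetical sinks, see below.
    broadcaster.AddOrUpdateSink(&sink_a, rtc::VideoSinkWants());
    broadcaster.AddOrUpdateSink(&sink_b, rtc::VideoSinkWants());
    // Called by the source when the VideoAdapter drops a frame; both sinks
    // receive OnDiscardedFrame().
    broadcaster.OnDiscardedFrame();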

@@ -179,6 +179,7 @@ bool VideoCapturer::AdaptFrame(int width,
           width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
           crop_width, crop_height, out_width, out_height)) {
     // VideoAdapter dropped the frame.
+    broadcaster_.OnDiscardedFrame();
     return false;
   }
   *crop_x = (width - *crop_width) / 2;


@@ -11,6 +11,7 @@
 #ifndef MEDIA_BASE_VIDEOSINKINTERFACE_H_
 #define MEDIA_BASE_VIDEOSINKINTERFACE_H_

+#include <rtc_base/checks.h>

 // TODO(nisse): Consider moving this interface (and possibly
 // VideoSourceInterface too) from media/base to common_video, to
 // reduce dependency cycles.

@@ -22,6 +23,10 @@ class VideoSinkInterface {
   virtual ~VideoSinkInterface() {}

   virtual void OnFrame(const VideoFrameT& frame) = 0;

+  // Should be called by the source when it discards the frame due to rate
+  // limiting.
+  virtual void OnDiscardedFrame() {}
 };

 }  // namespace rtc

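A sketch of a sink overriding the new default method; FrameCounterSink is hypothetical and not part of this change:

    class FrameCounterSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
     public:
      void OnFrame(const webrtc::VideoFrame& frame) override { ++delivered_; }
      void OnDiscardedFrame() override { ++discarded_; }  // New in this change.

     private:
      int delivered_ = 0;
      int discarded_ = 0;
    };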

@@ -216,8 +216,20 @@ void VCMEncodedFrameCallback::OnEncodeStarted(int64_t capture_time_ms,
   rtc::CritScope crit(&timing_params_lock_);
   if (timing_frames_info_.size() < simulcast_svc_idx + 1)
     timing_frames_info_.resize(simulcast_svc_idx + 1);
-  timing_frames_info_[simulcast_svc_idx].encode_start_time_ms[capture_time_ms] =
-      rtc::TimeMillis();
+  RTC_DCHECK(
+      timing_frames_info_[simulcast_svc_idx].encode_start_list.empty() ||
+      rtc::TimeDiff(capture_time_ms, timing_frames_info_[simulcast_svc_idx]
+                                         .encode_start_list.back()
+                                         .capture_time_ms) >= 0);
+  if (timing_frames_info_[simulcast_svc_idx].encode_start_list.size() ==
+      kMaxEncodeStartTimeListSize) {
+    LOG(LS_WARNING) << "Too many frames in the encode_start_list."
+                       " Did encoder stall?";
+    post_encode_callback_->OnDroppedFrame(DropReason::kDroppedByEncoder);
+    timing_frames_info_[simulcast_svc_idx].encode_start_list.pop_front();
+  }
+  timing_frames_info_[simulcast_svc_idx].encode_start_list.emplace_back(
+      capture_time_ms, rtc::TimeMillis());
 }

 EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(

@@ -249,20 +261,22 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
   // OnFrameRateChanged. |timing_frames_info_| may not be filled here.
   num_simulcast_svc_streams = timing_frames_info_.size();
   if (simulcast_svc_idx < num_simulcast_svc_streams) {
-    auto encode_start_map =
-        &timing_frames_info_[simulcast_svc_idx].encode_start_time_ms;
-    auto it = encode_start_map->find(encoded_image.capture_time_ms_);
-    if (it != encode_start_map->end()) {
-      encode_start_ms.emplace(it->second);
-      // Assuming all encoders do not reorder frames within single stream,
-      // there may be some dropped frames with smaller timestamps. These
-      // should be purged.
-      encode_start_map->erase(encode_start_map->begin(), it);
-      encode_start_map->erase(it);
-    } else {
-      // Encoder is with internal source: free our records of any frames just
-      // in case to free memory.
-      encode_start_map->clear();
+    auto encode_start_list =
+        &timing_frames_info_[simulcast_svc_idx].encode_start_list;
+    // Skip frames for which there was an OnEncodeStarted call but no
+    // OnEncodedImage call. These were dropped by the encoder internally.
+    while (!encode_start_list->empty() &&
+           encode_start_list->front().capture_time_ms <
+               encoded_image.capture_time_ms_) {
+      post_encode_callback_->OnDroppedFrame(DropReason::kDroppedByEncoder);
+      encode_start_list->pop_front();
+    }
+    if (encode_start_list->size() > 0 &&
+        encode_start_list->front().capture_time_ms ==
+            encoded_image.capture_time_ms_) {
+      encode_start_ms.emplace(
+          encode_start_list->front().encode_start_time_ms);
+      encode_start_list->pop_front();
     }
     size_t target_bitrate =

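The pruning above relies on capture timestamps being non-decreasing within a stream (enforced by the RTC_DCHECK in OnEncodeStarted): any record older than the frame the encoder just returned must have been dropped inside the encoder. A standalone sketch of the same idea with simplified types and no locking; the names here are illustrative:

    #include <cstdint>
    #include <list>

    struct EncodeStartRecord {
      int64_t capture_time_ms;
      int64_t encode_start_time_ms;
    };

    // Counts how many queued frames must have been dropped inside the encoder
    // before the frame with |encoded_capture_time_ms| came back out.
    int CountAndPruneDropped(std::list<EncodeStartRecord>* records,
                             int64_t encoded_capture_time_ms) {
      int dropped = 0;
      while (!records->empty() &&
             records->front().capture_time_ms < encoded_capture_time_ms) {
        records->pop_front();  // Entered the encoder but never came out.
        ++dropped;
      }
      return dropped;
    }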

@@ -12,7 +12,7 @@
 #define MODULES_VIDEO_CODING_GENERIC_ENCODER_H_

 #include <stdio.h>
-#include <map>
+#include <list>
 #include <vector>

 #include "modules/video_coding/include/video_codec_interface.h"

@@ -71,9 +71,16 @@ class VCMEncodedFrameCallback : public EncodedImageCallback {
   EncodedImageCallback* const post_encode_callback_;
   media_optimization::MediaOptimization* const media_opt_;

+  struct EncodeStartTimeRecord {
+    EncodeStartTimeRecord(int64_t capture_time, int64_t encode_start_time)
+        : capture_time_ms(capture_time),
+          encode_start_time_ms(encode_start_time) {}
+    int64_t capture_time_ms;
+    int64_t encode_start_time_ms;
+  };
   struct TimingFramesLayerInfo {
     size_t target_bitrate_bytes_per_sec = 0;
-    std::map<int64_t, int64_t> encode_start_time_ms;
+    std::list<EncodeStartTimeRecord> encode_start_list;
   };
   // Separate instance for each simulcast stream or spatial layer.
   std::vector<TimingFramesLayerInfo> timing_frames_info_

@@ -111,7 +118,6 @@ class VCMGenericEncoder {
   int32_t SetPeriodicKeyFrames(bool enable);
   int32_t RequestFrame(const std::vector<FrameType>& frame_types);
   bool InternalSource() const;
-  void OnDroppedFrame();
   bool SupportsNativeHandle() const;

  private:


@@ -27,7 +27,8 @@ inline size_t FrameSize(const size_t& min_frame_size,
 class FakeEncodedImageCallback : public EncodedImageCallback {
  public:
-  FakeEncodedImageCallback() : last_frame_was_timing_(false) {}
+  FakeEncodedImageCallback()
+      : last_frame_was_timing_(false), num_frames_dropped_(0) {}
   Result OnEncodedImage(const EncodedImage& encoded_image,
                         const CodecSpecificInfo* codec_specific_info,
                         const RTPFragmentationHeader* fragmentation) override {

@@ -36,10 +37,15 @@ class FakeEncodedImageCallback : public EncodedImageCallback {
     return Result(Result::OK);
   };

+  void OnDroppedFrame(DropReason reason) override { ++num_frames_dropped_; }
+
   bool WasTimingFrame() { return last_frame_was_timing_; }

+  size_t GetNumFramesDropped() { return num_frames_dropped_; }
+
  private:
   bool last_frame_was_timing_;
+  size_t num_frames_dropped_;
 };

 enum class FrameType {

@@ -190,5 +196,34 @@ TEST(TestVCMEncodedFrameCallback, NoTimingFrameIfNoEncodeStartTime) {
   EXPECT_FALSE(sink.WasTimingFrame());
 }

+TEST(TestVCMEncodedFrameCallback, NotifiesAboutDroppedFrames) {
+  EncodedImage image;
+  CodecSpecificInfo codec_specific;
+  const int64_t timestamp1 = 100;
+  const int64_t timestamp2 = 110;
+  const int64_t timestamp3 = 120;
+  const int64_t timestamp4 = 130;
+  codec_specific.codecType = kVideoCodecGeneric;
+  codec_specific.codecSpecific.generic.simulcast_idx = 0;
+  FakeEncodedImageCallback sink;
+  VCMEncodedFrameCallback callback(&sink, nullptr);
+  callback.OnEncodeStarted(timestamp1, 0);
+  EXPECT_EQ(0u, sink.GetNumFramesDropped());
+  image.capture_time_ms_ = timestamp1;
+  callback.OnEncodedImage(image, &codec_specific, nullptr);
+  callback.OnEncodeStarted(timestamp2, 0);
+  // No OnEncodedImage call for timestamp2. At this point it is not yet known
+  // that the frame with timestamp2 was dropped.
+  EXPECT_EQ(0u, sink.GetNumFramesDropped());
+  callback.OnEncodeStarted(timestamp3, 0);
+  image.capture_time_ms_ = timestamp3;
+  callback.OnEncodedImage(image, &codec_specific, nullptr);
+  EXPECT_EQ(1u, sink.GetNumFramesDropped());
+  callback.OnEncodeStarted(timestamp4, 0);
+  image.capture_time_ms_ = timestamp4;
+  callback.OnEncodedImage(image, &codec_specific, nullptr);
+  EXPECT_EQ(1u, sink.GetNumFramesDropped());
+}
+
 }  // namespace test
 }  // namespace webrtc


@@ -45,6 +45,8 @@ enum {
   // |kDefaultOutlierFrameSizePercent| in size of average frame.
   kDefaultTimingFramesDelayMs = 200,
   kDefaultOutlierFrameSizePercent = 250,
+  // Maximum number of frames for which we store encode start timing information.
+  kMaxEncodeStartTimeListSize = 50,
 };

 enum VCMVideoProtection {


@@ -324,7 +324,8 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
                  << " loss rate " << encoder_params.loss_rate << " rtt "
                  << encoder_params.rtt << " input frame rate "
                  << encoder_params.input_frame_rate;
-    post_encode_callback_->OnDroppedFrame();
+    post_encode_callback_->OnDroppedFrame(
+        EncodedImageCallback::DropReason::kDroppedByMediaOptimizations);
     return VCM_OK;
   }
   // TODO(pbos): Make sure setting send codec is synchronized with video


@@ -560,6 +560,23 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms(
                    << fec_bytes_per_sec.ToStringWithMultiplier(8);
     }
   }
+  LOG(LS_INFO) << "Frames encoded " << current_stats.frames_encoded;
+  LOG(LS_INFO) << uma_prefix_ << "DroppedFrames.Capturer "
+               << current_stats.frames_dropped_by_capturer;
+  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Capturer",
+                             current_stats.frames_dropped_by_capturer);
+  LOG(LS_INFO) << uma_prefix_ << "DroppedFrames.EncoderQueue "
+               << current_stats.frames_dropped_by_encoder_queue;
+  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.EncoderQueue",
+                             current_stats.frames_dropped_by_encoder_queue);
+  LOG(LS_INFO) << uma_prefix_ << "DroppedFrames.Encoder "
+               << current_stats.frames_dropped_by_encoder;
+  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Encoder",
+                             current_stats.frames_dropped_by_encoder);
+  LOG(LS_INFO) << uma_prefix_ << "DroppedFrames.Ratelimiter "
+               << current_stats.frames_dropped_by_rate_limiter;
+  RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Ratelimiter",
+                             current_stats.frames_dropped_by_rate_limiter);
 }

 void SendStatisticsProxy::OnEncoderReconfigured(

@@ -860,6 +877,26 @@ void SendStatisticsProxy::OnIncomingFrame(int width, int height) {
   }
 }

+void SendStatisticsProxy::OnFrameDroppedBySource() {
+  rtc::CritScope lock(&crit_);
+  ++stats_.frames_dropped_by_capturer;
+}
+
+void SendStatisticsProxy::OnFrameDroppedInEncoderQueue() {
+  rtc::CritScope lock(&crit_);
+  ++stats_.frames_dropped_by_encoder_queue;
+}
+
+void SendStatisticsProxy::OnFrameDroppedByEncoder() {
+  rtc::CritScope lock(&crit_);
+  ++stats_.frames_dropped_by_encoder;
+}
+
+void SendStatisticsProxy::OnFrameDroppedByMediaOptimizations() {
+  rtc::CritScope lock(&crit_);
+  ++stats_.frames_dropped_by_rate_limiter;
+}
+
 void SendStatisticsProxy::SetAdaptationStats(
     const VideoStreamEncoder::AdaptCounts& cpu_counts,
     const VideoStreamEncoder::AdaptCounts& quality_counts) {

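The increment methods above take |crit_| so they can be called from any thread, while the histograms are written once, in UpdateHistograms, when the stream is torn down. Assuming the usual SendStatisticsProxy prefixes ("WebRTC.Video." and "WebRTC.Video.Screenshare."), the resulting UMA names are of the form WebRTC.Video.DroppedFrames.Capturer. A standalone sketch of that accumulate-then-report pattern, with std::mutex standing in for rtc::CritScope (illustrative, not the actual class):

    #include <cstdint>
    #include <mutex>

    class DropCounter {
     public:
      // May be called concurrently from capture/encoder threads.
      void OnFrameDropped() {
        std::lock_guard<std::mutex> lock(mutex_);
        ++dropped_;
      }
      // Called once at teardown to flush the total to histograms and logs.
      uint32_t GetForReporting() {
        std::lock_guard<std::mutex> lock(mutex_);
        return dropped_;
      }

     private:
      std::mutex mutex_;
      uint32_t dropped_ = 0;
    };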

@@ -57,6 +57,12 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
   // Used to update incoming frame rate.
   void OnIncomingFrame(int width, int height);

+  // Dropped frame stats.
+  void OnFrameDroppedBySource();
+  void OnFrameDroppedInEncoderQueue();
+  void OnFrameDroppedByEncoder();
+  void OnFrameDroppedByMediaOptimizations();
+
   // Adaptation stats.
   void SetAdaptationStats(
       const VideoStreamEncoder::AdaptCounts& cpu_counts,


@@ -149,6 +149,7 @@ class VideoStreamEncoder::EncodeTask : public rtc::QueuedTask {
       LOG(LS_VERBOSE)
           << "Incoming frame dropped because the encoder is blocked.";
       ++video_stream_encoder_->dropped_frame_count_;
+      video_stream_encoder_->stats_proxy_->OnFrameDroppedInEncoderQueue();
     }
     if (log_stats_) {
       LOG(LS_INFO) << "Number of frames: captured "

@@ -723,6 +724,10 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) {
                      incoming_frame, this, rtc::TimeMicros(), log_stats)));
 }

+void VideoStreamEncoder::OnDiscardedFrame() {
+  stats_proxy_->OnFrameDroppedBySource();
+}
+
 bool VideoStreamEncoder::EncoderPaused() const {
   RTC_DCHECK_RUN_ON(&encoder_queue_);
   // Pause video if paused by caller or as long as the network is down or the

@@ -861,12 +866,20 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
   return result;
 }

-void VideoStreamEncoder::OnDroppedFrame() {
-  encoder_queue_.PostTask([this] {
-    RTC_DCHECK_RUN_ON(&encoder_queue_);
-    if (quality_scaler_)
-      quality_scaler_->ReportDroppedFrame();
-  });
+void VideoStreamEncoder::OnDroppedFrame(DropReason reason) {
+  switch (reason) {
+    case DropReason::kDroppedByMediaOptimizations:
+      stats_proxy_->OnFrameDroppedByMediaOptimizations();
+      encoder_queue_.PostTask([this] {
+        RTC_DCHECK_RUN_ON(&encoder_queue_);
+        if (quality_scaler_)
+          quality_scaler_->ReportDroppedFrame();
+      });
+      break;
+    case DropReason::kDroppedByEncoder:
+      stats_proxy_->OnFrameDroppedByEncoder();
+      break;
+  }
 }

 void VideoStreamEncoder::OnReceivedIntraFrameRequest(size_t stream_index) {


@@ -159,6 +159,7 @@ class VideoStreamEncoder : public rtc::VideoSinkInterface<VideoFrame>,
   // Implements VideoSinkInterface.
   void OnFrame(const VideoFrame& video_frame) override;
+  void OnDiscardedFrame() override;

   void EncodeVideoFrame(const VideoFrame& frame,
                         int64_t time_when_posted_in_ms);

@@ -169,7 +170,7 @@ class VideoStreamEncoder : public rtc::VideoSinkInterface<VideoFrame>,
                         const CodecSpecificInfo* codec_specific_info,
                         const RTPFragmentationHeader* fragmentation) override;

-  void OnDroppedFrame() override;
+  void OnDroppedFrame(EncodedImageCallback::DropReason reason) override;

   bool EncoderPaused() const;
   void TraceFrameDropStart();