Revert "Remove sent framerate and bitrate calculations from MediaOptimization."

This reverts commit af721b72cc1bdc5d945629ad78fbea701b6f82b9.

Reason for revert: <INSERT REASONING HERE>

Original change's description:
> Remove sent framerate and bitrate calculations from MediaOptimization.
> 
> Add RateTracker for sent framerate and bitrate in SendStatisticsProxy.
> 
> Store sent frame info in map to solve potential issue where sent framerate statistics could be
> incorrect.
> 
> Bug: webrtc:8375
> Change-Id: I4a6e3956013438a711b8c2e73a8cd90c52dd1210
> Reviewed-on: https://webrtc-review.googlesource.com/7880
> Reviewed-by: Erik Språng <sprang@webrtc.org>
> Commit-Queue: Åsa Persson <asapersson@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#20225}

TBR=asapersson@webrtc.org,sprang@webrtc.org

Change-Id: Ic914f03ff7065ac410ae06b6f82b56a935399b66
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:8375
Reviewed-on: https://webrtc-review.googlesource.com/8480
Reviewed-by: Åsa Persson <asapersson@webrtc.org>
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20248}
Author: Åsa Persson
Date: 2017-10-11 12:59:01 +00:00
Committed by: Commit Bot
Parent: 18945c35c2
Commit: ca0ed63c19
8 changed files with 171 additions and 120 deletions
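
For context, the change being reverted had moved the sent framerate and bitrate bookkeeping into SendStatisticsProxy using rtc::RateTracker, which this revert removes again in favor of the restored MediaOptimization code below. A minimal sketch of that RateTracker pattern, assuming nothing beyond the constructor and the AddSamples()/ComputeRate() calls visible in the send_statistics_proxy.cc diff (the free-standing names and the include path here are illustrative, not the actual class members):

#include <cstddef>
#include <cstdint>

#include "rtc_base/ratetracker.h"  // Include path assumed for the 2017 tree.

// 100 ms buckets, 10 buckets: roughly a one-second sliding window, matching
// the kBucketSizeMs / kBucketCount constants this revert deletes.
rtc::RateTracker media_byte_rate_tracker(100, 10u);
rtc::RateTracker encoded_frame_rate_tracker(100, 10u);

void OnEncodedImageSent(size_t encoded_length_bytes) {
  media_byte_rate_tracker.AddSamples(encoded_length_bytes);
  encoded_frame_rate_tracker.AddSamples(1);
}

uint32_t SentBitrateBps() {
  // ComputeRate() returns samples (here bytes) per second; convert to bits.
  return static_cast<uint32_t>(media_byte_rate_tracker.ComputeRate() * 8);
}

uint32_t SentFramerateFps() {
  return static_cast<uint32_t>(encoded_frame_rate_tracker.ComputeRate());
}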

View File

@@ -13,10 +13,26 @@
#include <limits>
#include "modules/video_coding/utility/frame_dropper.h"
#include "rtc_base/logging.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
namespace media_optimization {
const int kMsPerSec = 1000;
const int kBitsPerByte = 8;
struct MediaOptimization::EncodedFrameSample {
EncodedFrameSample(size_t size_bytes,
uint32_t timestamp,
int64_t time_complete_ms)
: size_bytes(size_bytes),
timestamp(timestamp),
time_complete_ms(time_complete_ms) {}
size_t size_bytes;
uint32_t timestamp;
int64_t time_complete_ms;
};
MediaOptimization::MediaOptimization(Clock* clock)
: clock_(clock),
@@ -24,7 +40,9 @@ MediaOptimization::MediaOptimization(Clock* clock)
user_frame_rate_(0),
frame_dropper_(new FrameDropper),
video_target_bitrate_(0),
incoming_frame_rate_(0) {
incoming_frame_rate_(0),
encoded_frame_samples_(),
avg_sent_framerate_(0) {
memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
}
@@ -40,6 +58,7 @@ void MediaOptimization::Reset() {
frame_dropper_->SetRates(0, 0);
video_target_bitrate_ = 0;
user_frame_rate_ = 0;
encoded_frame_samples_.clear();
}
void MediaOptimization::SetEncodingData(int32_t max_bit_rate,
@@ -98,14 +117,51 @@ uint32_t MediaOptimization::InputFrameRateInternal() {
return framerate;
}
uint32_t MediaOptimization::SentFrameRate() {
rtc::CritScope lock(&crit_sect_);
return SentFrameRateInternal();
}
uint32_t MediaOptimization::SentFrameRateInternal() {
PurgeOldFrameSamples(clock_->TimeInMilliseconds() - kBitrateAverageWinMs);
UpdateSentFramerate();
return avg_sent_framerate_;
}
uint32_t MediaOptimization::SentBitRate() {
rtc::CritScope lock(&crit_sect_);
PurgeOldFrameSamples(clock_->TimeInMilliseconds() - kBitrateAverageWinMs);
size_t sent_bytes = 0;
for (auto& frame_sample : encoded_frame_samples_) {
sent_bytes += frame_sample.size_bytes;
}
return sent_bytes * kBitsPerByte * kMsPerSec / kBitrateAverageWinMs;
}
int32_t MediaOptimization::UpdateWithEncodedData(
const EncodedImage& encoded_image) {
size_t encoded_length = encoded_image._length;
uint32_t timestamp = encoded_image._timeStamp;
rtc::CritScope lock(&crit_sect_);
const int64_t now_ms = clock_->TimeInMilliseconds();
PurgeOldFrameSamples(now_ms - kBitrateAverageWinMs);
if (encoded_frame_samples_.size() > 0 &&
encoded_frame_samples_.back().timestamp == timestamp) {
// Frames having the same timestamp are generated from the same input
// frame. We don't want to double count them, but only increment the
// size_bytes.
encoded_frame_samples_.back().size_bytes += encoded_length;
encoded_frame_samples_.back().time_complete_ms = now_ms;
} else {
encoded_frame_samples_.push_back(
EncodedFrameSample(encoded_length, timestamp, now_ms));
}
UpdateSentFramerate();
if (encoded_length > 0) {
const bool delta_frame = encoded_image._frameType != kVideoFrameKey;
frame_dropper_->Fill(encoded_length, delta_frame);
}
return VCM_OK;
}
@@ -136,6 +192,31 @@ void MediaOptimization::UpdateIncomingFrameRate() {
ProcessIncomingFrameRate(now);
}
void MediaOptimization::PurgeOldFrameSamples(int64_t threshold_ms) {
while (!encoded_frame_samples_.empty()) {
if (encoded_frame_samples_.front().time_complete_ms < threshold_ms) {
encoded_frame_samples_.pop_front();
} else {
break;
}
}
}
void MediaOptimization::UpdateSentFramerate() {
if (encoded_frame_samples_.size() <= 1) {
avg_sent_framerate_ = encoded_frame_samples_.size();
return;
}
int denom = encoded_frame_samples_.back().timestamp -
encoded_frame_samples_.front().timestamp;
if (denom > 0) {
avg_sent_framerate_ =
(90000 * (encoded_frame_samples_.size() - 1) + denom / 2) / denom;
} else {
avg_sent_framerate_ = encoded_frame_samples_.size();
}
}
// Allowing VCM to keep track of incoming frame rate.
void MediaOptimization::ProcessIncomingFrameRate(int64_t now) {
int32_t num = 0;
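
As a quick sanity check on the restored arithmetic above (a note, not part of the diff): SentBitRate() sums the bytes of the samples kept within kBitrateAverageWinMs (1000 ms), so sent_bytes * kBitsPerByte * kMsPerSec / kBitrateAverageWinMs is simply the windowed byte total expressed in bits per second. UpdateSentFramerate() relates the number of samples to the RTP timestamp span of the window at the 90 kHz clock: with, say, 31 samples whose first and last timestamps lie 90000 ticks (one second) apart, (90000 * 30 + 45000) / 90000 = 30 fps in integer arithmetic, while a non-positive span falls back to the sample count.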

View File

@@ -57,12 +57,22 @@ class MediaOptimization {
// InputFrameRate 0 = no frame rate estimate available.
uint32_t InputFrameRate();
uint32_t SentFrameRate();
uint32_t SentBitRate();
private:
enum { kFrameCountHistorySize = 90 };
enum { kFrameHistoryWinMs = 2000 };
enum { kBitrateAverageWinMs = 1000 };
struct EncodedFrameSample;
typedef std::list<EncodedFrameSample> FrameSampleList;
void UpdateIncomingFrameRate() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
void PurgeOldFrameSamples(int64_t threshold_ms)
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
void UpdateSentFramerate() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
void ProcessIncomingFrameRate(int64_t now)
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
@@ -78,6 +88,8 @@ class MediaOptimization {
uint32_t InputFrameRateInternal() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
uint32_t SentFrameRateInternal() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Protect all members.
rtc::CriticalSection crit_sect_;
@@ -89,6 +101,9 @@ class MediaOptimization {
float incoming_frame_rate_ RTC_GUARDED_BY(crit_sect_);
int64_t incoming_frame_times_[kFrameCountHistorySize] RTC_GUARDED_BY(
crit_sect_);
std::list<EncodedFrameSample> encoded_frame_samples_
RTC_GUARDED_BY(crit_sect_);
uint32_t avg_sent_framerate_ RTC_GUARDED_BY(crit_sect_);
};
} // namespace media_optimization
} // namespace webrtc

View File

@@ -52,15 +52,14 @@ VideoSender::VideoSender(Clock* clock,
VideoSender::~VideoSender() {}
// TODO(asapersson): Remove _sendStatsTimer and send_stats_callback_.
void VideoSender::Process() {
if (_sendStatsTimer.TimeUntilProcess() == 0) {
// |_sendStatsTimer.Processed()| must be called. Otherwise
// VideoSender::Process() will be called in an infinite loop.
_sendStatsTimer.Processed();
if (send_stats_callback_) {
uint32_t bitRate = 0;
uint32_t frameRate = 0;
uint32_t bitRate = _mediaOpt.SentBitRate();
uint32_t frameRate = _mediaOpt.SentFrameRate();
send_stats_callback_->SendStatistics(bitRate, frameRate);
}
}

View File

@@ -25,10 +25,6 @@
namespace webrtc {
namespace {
const float kEncodeTimeWeigthFactor = 0.5f;
const size_t kMaxEncodedFrameMapSize = 1000;
const int64_t kMaxEncodedFrameWindowMs = 800;
const int64_t kBucketSizeMs = 100;
const size_t kBucketCount = 10;
const char kVp8ForcedFallbackEncoderFieldTrial[] =
"WebRTC-VP8-Forced-Fallback-Encoder";
@@ -124,11 +120,10 @@ SendStatisticsProxy::SendStatisticsProxy(
min_first_fallback_interval_ms_(GetFallbackIntervalFromFieldTrial()),
content_type_(content_type),
start_ms_(clock->TimeInMilliseconds()),
last_sent_frame_timestamp_(0),
encode_time_(kEncodeTimeWeigthFactor),
quality_downscales_(-1),
cpu_downscales_(-1),
media_byte_rate_tracker_(kBucketSizeMs, kBucketCount),
encoded_frame_rate_tracker_(kBucketSizeMs, kBucketCount),
uma_container_(
new UmaSamplesContainer(GetUmaPrefix(content_type_), stats_, clock)) {
}
@@ -151,6 +146,8 @@ SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer(
Clock* const clock)
: uma_prefix_(prefix),
clock_(clock),
max_sent_width_per_timestamp_(0),
max_sent_height_per_timestamp_(0),
input_frame_rate_tracker_(100, 10u),
input_fps_counter_(clock, nullptr, true),
sent_fps_counter_(clock, nullptr, true),
@@ -189,44 +186,6 @@ void SendStatisticsProxy::UmaSamplesContainer::InitializeBitrateCounters(
}
}
void SendStatisticsProxy::UmaSamplesContainer::RemoveOld(int64_t now_ms) {
while (!encoded_frames_.empty()) {
auto it = encoded_frames_.begin();
if (now_ms - it->second.send_ms < kMaxEncodedFrameWindowMs)
break;
// Use max per timestamp.
sent_width_counter_.Add(it->second.max_width);
sent_height_counter_.Add(it->second.max_height);
encoded_frames_.erase(it);
}
}
bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame(
const EncodedImage& encoded_frame) {
int64_t now_ms = clock_->TimeInMilliseconds();
RemoveOld(now_ms);
if (encoded_frames_.size() > kMaxEncodedFrameMapSize) {
encoded_frames_.clear();
}
auto it = encoded_frames_.find(encoded_frame._timeStamp);
if (it == encoded_frames_.end()) {
// First frame with this timestamp.
encoded_frames_.insert(std::make_pair(
encoded_frame._timeStamp, Frame(now_ms, encoded_frame._encodedWidth,
encoded_frame._encodedHeight)));
sent_fps_counter_.Add(1);
return true;
}
it->second.max_width =
std::max(it->second.max_width, encoded_frame._encodedWidth);
it->second.max_height =
std::max(it->second.max_height, encoded_frame._encodedHeight);
return false;
}
void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms(
const VideoSendStream::Config::Rtp& rtp_config,
const VideoSendStream::Stats& current_stats) {
@@ -576,6 +535,13 @@ void SendStatisticsProxy::OnEncoderReconfigured(
}
}
void SendStatisticsProxy::OnEncoderStatsUpdate(uint32_t framerate,
uint32_t bitrate) {
rtc::CritScope lock(&crit_);
stats_.encode_frame_rate = framerate;
stats_.media_bitrate_bps = bitrate;
}
void SendStatisticsProxy::OnEncodedFrameTimeMeasured(
int encode_time_ms,
const CpuOveruseMetrics& metrics) {
@@ -630,8 +596,6 @@ VideoSendStream::Stats SendStatisticsProxy::GetStats() {
content_type_ == VideoEncoderConfig::ContentType::kRealtimeVideo
? VideoContentType::UNSPECIFIED
: VideoContentType::SCREENSHARE;
stats_.encode_frame_rate = round(encoded_frame_rate_tracker_.ComputeRate());
stats_.media_bitrate_bps = media_byte_rate_tracker_.ComputeRate() * 8;
return stats_;
}
@@ -838,14 +802,31 @@ void SendStatisticsProxy::OnSendEncodedImage(
}
}
media_byte_rate_tracker_.AddSamples(encoded_image._length);
if (uma_container_->InsertEncodedFrame(encoded_image))
encoded_frame_rate_tracker_.AddSamples(1);
// TODO(asapersson): This is incorrect if simulcast layers are encoded on
// different threads and there is no guarantee that one frame of all layers
// are encoded before the next start.
if (last_sent_frame_timestamp_ > 0 &&
encoded_image._timeStamp != last_sent_frame_timestamp_) {
uma_container_->sent_fps_counter_.Add(1);
uma_container_->sent_width_counter_.Add(
uma_container_->max_sent_width_per_timestamp_);
uma_container_->sent_height_counter_.Add(
uma_container_->max_sent_height_per_timestamp_);
uma_container_->max_sent_width_per_timestamp_ = 0;
uma_container_->max_sent_height_per_timestamp_ = 0;
}
last_sent_frame_timestamp_ = encoded_image._timeStamp;
uma_container_->max_sent_width_per_timestamp_ =
std::max(uma_container_->max_sent_width_per_timestamp_,
static_cast<int>(encoded_image._encodedWidth));
uma_container_->max_sent_height_per_timestamp_ =
std::max(uma_container_->max_sent_height_per_timestamp_,
static_cast<int>(encoded_image._encodedHeight));
}
int SendStatisticsProxy::GetSendFrameRate() const {
rtc::CritScope lock(&crit_);
return round(encoded_frame_rate_tracker_.ComputeRate());
return stats_.encode_frame_rate;
}
void SendStatisticsProxy::OnIncomingFrame(int width, int height) {

View File

@@ -68,6 +68,7 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
const VideoStreamEncoder::AdaptCounts& cpu_counts,
const VideoStreamEncoder::AdaptCounts& quality_counts);
void OnEncoderStatsUpdate(uint32_t framerate, uint32_t bitrate);
void OnSuspendChange(bool is_suspended);
void OnInactiveSsrc(uint32_t ssrc);
@@ -169,25 +170,6 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
SampleCounter vp9; // QP range: 0-255.
SampleCounter h264; // QP range: 0-51.
};
// Map holding encoded frames (mapped by timestamp).
// If simulcast layers are encoded on different threads, there is no guarantee
// that one frame of all layers are encoded before the next start.
struct TimestampOlderThan {
bool operator()(uint32_t ts1, uint32_t ts2) const {
return IsNewerTimestamp(ts2, ts1);
}
};
struct Frame {
Frame(int64_t send_ms, uint32_t width, uint32_t height)
: send_ms(send_ms), max_width(width), max_height(height) {}
const int64_t
send_ms; // Time when first frame with this timestamp is sent.
uint32_t max_width; // Max width with this timestamp.
uint32_t max_height; // Max height with this timestamp.
};
typedef std::map<uint32_t, Frame, TimestampOlderThan> EncodedFrameMap;
void PurgeOldStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
VideoSendStream::StreamStats* GetStatsEntry(uint32_t ssrc)
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
@@ -210,12 +192,11 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
VideoEncoderConfig::ContentType content_type_ RTC_GUARDED_BY(crit_);
const int64_t start_ms_;
VideoSendStream::Stats stats_ RTC_GUARDED_BY(crit_);
uint32_t last_sent_frame_timestamp_ RTC_GUARDED_BY(crit_);
std::map<uint32_t, StatsUpdateTimes> update_times_ RTC_GUARDED_BY(crit_);
rtc::ExpFilter encode_time_ RTC_GUARDED_BY(crit_);
int quality_downscales_ RTC_GUARDED_BY(crit_);
int cpu_downscales_ RTC_GUARDED_BY(crit_);
rtc::RateTracker media_byte_rate_tracker_ RTC_GUARDED_BY(crit_);
rtc::RateTracker encoded_frame_rate_tracker_ RTC_GUARDED_BY(crit_);
// Contains stats used for UMA histograms. These stats will be reset if
// content type changes between real-time video and screenshare, since these
@@ -231,11 +212,10 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
void InitializeBitrateCounters(const VideoSendStream::Stats& stats);
bool InsertEncodedFrame(const EncodedImage& encoded_frame);
void RemoveOld(int64_t now_ms);
const std::string uma_prefix_;
Clock* const clock_;
int max_sent_width_per_timestamp_;
int max_sent_height_per_timestamp_;
SampleCounter input_width_counter_;
SampleCounter input_height_counter_;
SampleCounter sent_width_counter_;
@@ -268,7 +248,6 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
FallbackEncoderInfo fallback_info_;
ReportBlockStats report_block_stats_;
const VideoSendStream::Stats start_stats_;
EncodedFrameMap encoded_frames_;
std::map<int, QpCounters>
qp_counters_; // QP counters mapped by spatial idx.

View File

@@ -185,6 +185,17 @@ TEST_F(SendStatisticsProxyTest, RtcpStatistics) {
ExpectEqual(expected_, stats);
}
TEST_F(SendStatisticsProxyTest, EncodedBitrateAndFramerate) {
int media_bitrate_bps = 500;
int encode_fps = 29;
statistics_proxy_->OnEncoderStatsUpdate(encode_fps, media_bitrate_bps);
VideoSendStream::Stats stats = statistics_proxy_->GetStats();
EXPECT_EQ(media_bitrate_bps, stats.media_bitrate_bps);
EXPECT_EQ(encode_fps, stats.encode_frame_rate);
}
TEST_F(SendStatisticsProxyTest, Suspended) {
// Verify that the value is false by default.
EXPECT_FALSE(statistics_proxy_->GetStats().suspended);
@@ -808,36 +819,13 @@ TEST_F(SendStatisticsProxyTest, InputResolutionHistogramsAreUpdated) {
}
TEST_F(SendStatisticsProxyTest, SentResolutionHistogramsAreUpdated) {
const int64_t kMaxEncodedFrameWindowMs = 800;
const int kFps = 20;
const int kNumFramesPerWindow = kFps * kMaxEncodedFrameWindowMs / 1000;
const int kMinSamples = // Sample added when removed from EncodedFrameMap.
SendStatisticsProxy::kMinRequiredMetricsSamples + kNumFramesPerWindow;
EncodedImage encoded_image;
// Not enough samples, stats should not be updated.
for (int i = 0; i < kMinSamples - 1; ++i) {
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
++encoded_image._timeStamp;
encoded_image._encodedWidth = kWidth;
encoded_image._encodedHeight = kHeight;
for (int i = 0; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
encoded_image._timeStamp = i + 1;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
}
SetUp(); // Reset stats proxy also causes histograms to be reported.
EXPECT_EQ(0, metrics::NumSamples("WebRTC.Video.SentWidthInPixels"));
EXPECT_EQ(0, metrics::NumSamples("WebRTC.Video.SentHeightInPixels"));
// Enough samples, max resolution per frame should be reported.
encoded_image._timeStamp = 0xfffffff0; // Will wrap.
for (int i = 0; i < kMinSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
++encoded_image._timeStamp;
encoded_image._encodedWidth = kWidth;
encoded_image._encodedHeight = kHeight;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
encoded_image._encodedWidth = kWidth / 2;
encoded_image._encodedHeight = kHeight / 2;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
}
statistics_proxy_.reset();
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.SentWidthInPixels"));
EXPECT_EQ(1, metrics::NumEvents("WebRTC.Video.SentWidthInPixels", kWidth));
@@ -863,11 +851,9 @@ TEST_F(SendStatisticsProxyTest, SentFpsHistogramIsUpdated) {
const int kFps = 20;
const int kMinPeriodicSamples = 6;
int frames = kMinPeriodicSamples * kFpsPeriodicIntervalMs * kFps / 1000 + 1;
for (int i = 0; i < frames; ++i) {
for (int i = 0; i <= frames; ++i) {
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
++encoded_image._timeStamp;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
// Frame with same timestamp should not be counted.
encoded_image._timeStamp = i + 1;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
}
statistics_proxy_.reset();
@@ -904,7 +890,7 @@ TEST_F(SendStatisticsProxyTest, SentFpsHistogramExcludesSuspendedTime) {
const int kSuspendTimeMs = 10000;
const int kMinPeriodicSamples = 6;
int frames = kMinPeriodicSamples * kFpsPeriodicIntervalMs * kFps / 1000;
for (int i = 0; i < frames; ++i) {
for (int i = 0; i <= frames; ++i) {
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
encoded_image._timeStamp = i + 1;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
@@ -913,7 +899,7 @@ TEST_F(SendStatisticsProxyTest, SentFpsHistogramExcludesSuspendedTime) {
statistics_proxy_->OnSuspendChange(true);
fake_clock_.AdvanceTimeMilliseconds(kSuspendTimeMs);
for (int i = 0; i < frames; ++i) {
for (int i = 0; i <= frames; ++i) {
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
encoded_image._timeStamp = i + 1;
statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);

View File

@@ -375,13 +375,12 @@ class VideoStreamEncoder::VideoSourceProxy {
RTC_DISALLOW_COPY_AND_ASSIGN(VideoSourceProxy);
};
VideoStreamEncoder::VideoStreamEncoder(
uint32_t number_of_cores,
SendStatisticsProxy* stats_proxy,
const VideoSendStream::Config::EncoderSettings& settings,
rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
EncodedFrameObserver* encoder_timing,
std::unique_ptr<OveruseFrameDetector> overuse_detector)
VideoStreamEncoder::VideoStreamEncoder(uint32_t number_of_cores,
SendStatisticsProxy* stats_proxy,
const VideoSendStream::Config::EncoderSettings& settings,
rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
EncodedFrameObserver* encoder_timing,
std::unique_ptr<OveruseFrameDetector> overuse_detector)
: shutdown_event_(true /* manual_reset */, false),
number_of_cores_(number_of_cores),
initial_rampup_(0),
@@ -389,7 +388,7 @@ VideoStreamEncoder::VideoStreamEncoder(
sink_(nullptr),
settings_(settings),
codec_type_(PayloadStringToCodecType(settings.payload_name)),
video_sender_(Clock::GetRealTimeClock(), this, nullptr),
video_sender_(Clock::GetRealTimeClock(), this, this),
overuse_detector_(
overuse_detector.get()
? overuse_detector.release()
@@ -869,6 +868,12 @@ void VideoStreamEncoder::OnDroppedFrame() {
});
}
void VideoStreamEncoder::SendStatistics(uint32_t bit_rate,
uint32_t frame_rate) {
RTC_DCHECK(module_process_thread_checker_.CalledOnValidThread());
stats_proxy_->OnEncoderStatsUpdate(frame_rate, bit_rate);
}
void VideoStreamEncoder::OnReceivedIntraFrameRequest(size_t stream_index) {
if (!encoder_queue_.IsCurrent()) {
encoder_queue_.PostTask(

View File

@@ -50,6 +50,7 @@ class VideoBitrateAllocationObserver;
// Call Stop() when done.
class VideoStreamEncoder : public rtc::VideoSinkInterface<VideoFrame>,
public EncodedImageCallback,
public VCMSendStatisticsCallback,
public AdaptationObserverInterface {
public:
// Interface for receiving encoded video frames and notifications about
@@ -160,6 +161,10 @@ class VideoStreamEncoder : public rtc::VideoSinkInterface<VideoFrame>,
// Implements VideoSinkInterface.
void OnFrame(const VideoFrame& video_frame) override;
// Implements VideoSendStatisticsCallback.
void SendStatistics(uint32_t bit_rate,
uint32_t frame_rate) override;
void EncodeVideoFrame(const VideoFrame& frame,
int64_t time_when_posted_in_ms);