Use TimeDelta and Timestamp in VCMJitterEstimator

* Use DataSize to represent incoming and outgoing byte counts (see the caller-side sketch below).
* Convert unit types to plain doubles as they enter the Kalman filter.
* Move the jitter estimator into its own GN target.

Change-Id: I1e7d5486a00a7158d418f553a6c77f9dd56bf3c2
Bug: webrtc:13756
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/253121
Reviewed-by: Åsa Persson <asapersson@webrtc.org>
Commit-Queue: Evan Shrubsole <eshr@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#36143}
Author: Evan Shrubsole
Date: 2022-03-07 13:21:51 +01:00
Committed by: WebRTC LUCI CQ
Parent: a5f3c20f2d
Commit: 13e42a88df
9 changed files with 355 additions and 298 deletions
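For orientation, the caller-side pattern after this change looks roughly like the sketch below. It is an illustrative fragment, not code from this CL: the function name and the raw_* parameters are invented, while the VCMJitterEstimator methods and unit types follow the signatures introduced in the diff.

#include "absl/types/optional.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"
#include "modules/video_coding/jitter_estimator.h"

// Hypothetical helper: wraps legacy integer quantities in unit types at the
// boundary and reads the estimate back as a TimeDelta.
void FeedJitterEstimator(webrtc::VCMJitterEstimator& estimator,
                         int64_t raw_delay_ms,
                         size_t raw_frame_bytes,
                         int64_t raw_rtt_ms) {
  estimator.UpdateRtt(webrtc::TimeDelta::Millis(raw_rtt_ms));
  estimator.UpdateEstimate(webrtc::TimeDelta::Millis(raw_delay_ms),
                           webrtc::DataSize::Bytes(raw_frame_bytes));
  // GetJitterEstimate() now returns a TimeDelta instead of an integer in ms.
  webrtc::TimeDelta jitter =
      estimator.GetJitterEstimate(/*rtt_multiplier=*/1.0,
                                  /*rtt_mult_add_cap=*/absl::nullopt);
  (void)jitter;  // In the CL this feeds e.g. VCMTiming::SetJitterDelay().
}

Keeping the Millis()/Bytes() conversions at the API boundary means the filter internals can keep operating on plain doubles, which is what the second bullet of the commit message describes.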


@@ -214,6 +214,26 @@ rtc_library("rtt_filter") {
]
}
rtc_library("jitter_estimator") {
sources = [
"jitter_estimator.cc",
"jitter_estimator.h",
]
deps = [
":rtt_filter",
"../../api/units:data_size",
"../../api/units:frequency",
"../../api/units:time_delta",
"../../api/units:timestamp",
"../../rtc_base",
"../../rtc_base:safe_conversions",
"../../rtc_base/experiments:jitter_upper_bound_experiment",
"../../system_wrappers",
"../../system_wrappers:field_trial",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_coding") {
visibility = [ "*" ]
sources = [
@@ -236,8 +256,6 @@ rtc_library("video_coding") {
"inter_frame_delay.cc",
"inter_frame_delay.h",
"internal_defines.h",
"jitter_estimator.cc",
"jitter_estimator.h",
"loss_notification_controller.cc",
"loss_notification_controller.h",
"media_opt_util.cc",
@@ -268,6 +286,7 @@ rtc_library("video_coding") {
":encoded_frame",
":frame_buffer",
":frame_helpers",
":jitter_estimator",
":packet_buffer",
":rtt_filter",
":timing",
@@ -285,6 +304,8 @@ rtc_library("video_coding") {
"../../api:sequence_checker",
"../../api/task_queue",
"../../api/units:data_rate",
"../../api/units:data_size",
"../../api/units:frequency",
"../../api/units:time_delta",
"../../api/units:timestamp",
"../../api/video:builtin_video_bitrate_allocator_factory",
@@ -381,6 +402,7 @@ rtc_library("video_coding_legacy") {
deps = [
":codec_globals_headers",
":encoded_frame",
":jitter_estimator",
":timing",
":video_codec_interface",
":video_coding",
@@ -1129,6 +1151,7 @@ if (rtc_include_tests) {
":frame_buffer",
":frame_dependencies_calculator",
":h264_packet_buffer",
":jitter_estimator",
":nack_requester",
":packet_buffer",
":rtt_filter",
@@ -1158,6 +1181,7 @@ if (rtc_include_tests) {
"../../api:videocodec_test_fixture_api",
"../../api/task_queue:default_task_queue_factory",
"../../api/test/video:function_video_factory",
"../../api/units:data_size",
"../../api/units:frequency",
"../../api/units:time_delta",
"../../api/units:timestamp",


@@ -19,6 +19,8 @@
#include <vector>
#include "absl/container/inlined_vector.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"
#include "api/video/encoded_image.h"
#include "api/video/video_timing.h"
#include "modules/video_coding/frame_helpers.h"
@@ -250,7 +252,7 @@ std::unique_ptr<EncodedFrame> FrameBuffer::GetNextFrame() {
RTC_DCHECK(!frames_to_decode_.empty());
bool superframe_delayed_by_retransmission = false;
size_t superframe_size = 0;
DataSize superframe_size = DataSize::Zero();
const EncodedFrame& first_frame = *frames_to_decode_[0]->second.frame;
absl::optional<Timestamp> render_time = first_frame.RenderTimestamp();
int64_t receive_time_ms = first_frame.ReceivedTime();
@@ -270,7 +272,7 @@ std::unique_ptr<EncodedFrame> FrameBuffer::GetNextFrame() {
superframe_delayed_by_retransmission |= frame->delayed_by_retransmission();
receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime());
superframe_size += frame->size();
superframe_size += DataSize::Bytes(frame->size());
PropagateDecodability(frame_it->second);
decoded_frames_history_.InsertDecoded(frame_it->first, frame->Timestamp());
@@ -297,17 +299,19 @@ std::unique_ptr<EncodedFrame> FrameBuffer::GetNextFrame() {
if (inter_frame_delay_.CalculateDelay(first_frame.Timestamp(), &frame_delay,
receive_time_ms)) {
jitter_estimator_.UpdateEstimate(frame_delay, superframe_size);
jitter_estimator_.UpdateEstimate(TimeDelta::Millis(frame_delay),
superframe_size);
}
float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
absl::optional<float> rtt_mult_add_cap_ms = absl::nullopt;
absl::optional<TimeDelta> rtt_mult_add_cap_ms = absl::nullopt;
if (rtt_mult_settings_.has_value()) {
rtt_mult = rtt_mult_settings_->rtt_mult_setting;
rtt_mult_add_cap_ms = rtt_mult_settings_->rtt_mult_add_cap_ms;
rtt_mult_add_cap_ms =
TimeDelta::Millis(rtt_mult_settings_->rtt_mult_add_cap_ms);
}
timing_->SetJitterDelay(TimeDelta::Millis(
jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms)));
timing_->SetJitterDelay(
jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms));
timing_->UpdateCurrentDelay(*render_time, now);
} else {
if (RttMultExperiment::RttMultEnabled())
@@ -352,7 +356,7 @@ int FrameBuffer::Size() {
void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
MutexLock lock(&mutex_);
jitter_estimator_.UpdateRtt(rtt_ms);
jitter_estimator_.UpdateRtt(TimeDelta::Millis(rtt_ms));
}
bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {


@@ -572,7 +572,7 @@ void VCMJitterBuffer::FindAndInsertContinuousFramesWithState(
uint32_t VCMJitterBuffer::EstimatedJitterMs() {
MutexLock lock(&mutex_);
const double rtt_mult = 1.0f;
return jitter_estimate_.GetJitterEstimate(rtt_mult, absl::nullopt);
return jitter_estimate_.GetJitterEstimate(rtt_mult, absl::nullopt).ms();
}
void VCMJitterBuffer::SetNackSettings(size_t max_nack_list_size,
@@ -879,7 +879,9 @@ void VCMJitterBuffer::UpdateJitterEstimate(int64_t latest_packet_time_ms,
// Filter out frames which have been reordered in time by the network
if (not_reordered) {
// Update the jitter estimate with the new samples
jitter_estimate_.UpdateEstimate(frame_delay, frame_size, incomplete_frame);
jitter_estimate_.UpdateEstimate(TimeDelta::Millis(frame_delay),
DataSize::Bytes(frame_size),
incomplete_frame);
}
}


@@ -17,7 +17,10 @@
#include <cstdint>
#include "absl/types/optional.h"
#include "modules/video_coding/internal_defines.h"
#include "api/units/data_size.h"
#include "api/units/frequency.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "modules/video_coding/rtt_filter.h"
#include "rtc_base/experiments/jitter_upper_bound_experiment.h"
#include "rtc_base/numerics/safe_conversions.h"
@@ -28,24 +31,26 @@ namespace webrtc {
namespace {
static constexpr uint32_t kStartupDelaySamples = 30;
static constexpr int64_t kFsAccuStartupSamples = 5;
static constexpr double kMaxFramerateEstimate = 200.0;
static constexpr int64_t kNackCountTimeoutMs = 60000;
static constexpr Frequency kMaxFramerateEstimate = Frequency::Hertz(200);
static constexpr TimeDelta kNackCountTimeout = TimeDelta::Seconds(60);
static constexpr double kDefaultMaxTimestampDeviationInSigmas = 3.5;
constexpr double kPhi = 0.97;
constexpr double kPsi = 0.9999;
constexpr uint32_t kAlphaCountMax = 400;
constexpr double kThetaLow = 0.000001;
constexpr uint32_t kNackLimit = 3;
constexpr int32_t kNumStdDevDelayOutlier = 15;
constexpr int32_t kNumStdDevFrameSizeOutlier = 3;
// ~Less than 1% chance (look up in normal distribution table)...
constexpr double kNoiseStdDevs = 2.33;
// ...of getting 30 ms freezes
constexpr double kNoiseStdDevOffset = 30.0;
} // namespace
VCMJitterEstimator::VCMJitterEstimator(Clock* clock)
: _phi(0.97),
_psi(0.9999),
_alphaCountMax(400),
_thetaLow(0.000001),
_nackLimit(3),
_numStdDevDelayOutlier(15),
_numStdDevFrameSizeOutlier(3),
_noiseStdDevs(2.33), // ~Less than 1% chance
// (look up in normal distribution table)...
_noiseStdDevOffset(30.0), // ...of getting 30 ms freezes
_rttFilter(),
fps_counter_(30), // TODO(sprang): Use an estimator with limit based on
: fps_counter_(30), // TODO(sprang): Use an estimator with limit based on
// time, rather than number of samples.
time_deviation_upper_bound_(
JitterUpperBoundExperiment::GetUpperBoundSigmas().value_or(
@@ -56,133 +61,133 @@ VCMJitterEstimator::VCMJitterEstimator(Clock* clock)
Reset();
}
VCMJitterEstimator::~VCMJitterEstimator() {}
VCMJitterEstimator::~VCMJitterEstimator() = default;
// Resets the JitterEstimate.
void VCMJitterEstimator::Reset() {
_theta[0] = 1 / (512e3 / 8);
_theta[1] = 0;
_varNoise = 4.0;
theta_[0] = 1 / (512e3 / 8);
theta_[1] = 0;
var_noise_ = 4.0;
_thetaCov[0][0] = 1e-4;
_thetaCov[1][1] = 1e2;
_thetaCov[0][1] = _thetaCov[1][0] = 0;
_Qcov[0][0] = 2.5e-10;
_Qcov[1][1] = 1e-10;
_Qcov[0][1] = _Qcov[1][0] = 0;
_avgFrameSize = 500;
_maxFrameSize = 500;
_varFrameSize = 100;
_lastUpdateT = -1;
_prevEstimate = -1.0;
_prevFrameSize = 0;
_avgNoise = 0.0;
_alphaCount = 1;
_filterJitterEstimate = 0.0;
_latestNackTimestamp = 0;
_nackCount = 0;
_latestNackTimestamp = 0;
_fsSum = 0;
_fsCount = 0;
_startupCount = 0;
_rttFilter.Reset();
theta_cov_[0][0] = 1e-4;
theta_cov_[1][1] = 1e2;
theta_cov_[0][1] = theta_cov_[1][0] = 0;
q_cov_[0][0] = 2.5e-10;
q_cov_[1][1] = 1e-10;
q_cov_[0][1] = q_cov_[1][0] = 0;
avg_frame_size_ = kDefaultAvgAndMaxFrameSize;
max_frame_size_ = kDefaultAvgAndMaxFrameSize;
var_frame_size_ = 100;
last_update_time_ = absl::nullopt;
prev_estimate_ = absl::nullopt;
prev_frame_size_ = absl::nullopt;
avg_noise_ = 0.0;
alpha_count_ = 1;
filter_jitter_estimate_ = TimeDelta::Zero();
latest_nack_ = Timestamp::Zero();
nack_count_ = 0;
frame_size_sum_ = DataSize::Zero();
frame_size_count_ = 0;
startup_count_ = 0;
rtt_filter_.Reset();
fps_counter_.Reset();
}
// Updates the estimates with the new measurements.
void VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS,
uint32_t frameSizeBytes,
bool incompleteFrame /* = false */) {
if (frameSizeBytes == 0) {
void VCMJitterEstimator::UpdateEstimate(TimeDelta frame_delay,
DataSize frame_size,
bool incomplete_frame /* = false */) {
if (frame_size.IsZero()) {
return;
}
int deltaFS = frameSizeBytes - _prevFrameSize;
if (_fsCount < kFsAccuStartupSamples) {
_fsSum += frameSizeBytes;
_fsCount++;
} else if (_fsCount == kFsAccuStartupSamples) {
// Can't use DataSize since this can be negative.
double delta_frame_bytes =
frame_size.bytes() - prev_frame_size_.value_or(DataSize::Zero()).bytes();
if (frame_size_count_ < kFsAccuStartupSamples) {
frame_size_sum_ += frame_size;
frame_size_count_++;
} else if (frame_size_count_ == kFsAccuStartupSamples) {
// Give the frame size filter.
_avgFrameSize = static_cast<double>(_fsSum) / static_cast<double>(_fsCount);
_fsCount++;
avg_frame_size_ = frame_size_sum_ / static_cast<double>(frame_size_count_);
frame_size_count_++;
}
if (!incompleteFrame || frameSizeBytes > _avgFrameSize) {
double avgFrameSize = _phi * _avgFrameSize + (1 - _phi) * frameSizeBytes;
if (frameSizeBytes < _avgFrameSize + 2 * sqrt(_varFrameSize)) {
if (!incomplete_frame || frame_size > avg_frame_size_) {
DataSize avg_frame_size = kPhi * avg_frame_size_ + (1 - kPhi) * frame_size;
DataSize deviation_size = DataSize::Bytes(2 * sqrt(var_frame_size_));
if (frame_size < avg_frame_size_ + deviation_size) {
// Only update the average frame size if this sample wasn't a key frame.
_avgFrameSize = avgFrameSize;
avg_frame_size_ = avg_frame_size;
}
// Update the variance anyway since we want to capture cases where we only
// get key frames.
_varFrameSize = VCM_MAX(
_phi * _varFrameSize + (1 - _phi) * (frameSizeBytes - avgFrameSize) *
(frameSizeBytes - avgFrameSize),
1.0);
double delta_bytes = frame_size.bytes() - avg_frame_size.bytes();
var_frame_size_ = std::max(
kPhi * var_frame_size_ + (1 - kPhi) * (delta_bytes * delta_bytes), 1.0);
}
// Update max frameSize estimate.
_maxFrameSize =
VCM_MAX(_psi * _maxFrameSize, static_cast<double>(frameSizeBytes));
// Update max_frame_size_ estimate.
max_frame_size_ = std::max(kPsi * max_frame_size_, frame_size);
if (_prevFrameSize == 0) {
_prevFrameSize = frameSizeBytes;
if (!prev_frame_size_) {
prev_frame_size_ = frame_size;
return;
}
_prevFrameSize = frameSizeBytes;
prev_frame_size_ = frame_size;
// Cap frameDelayMS based on the current time deviation noise.
int64_t max_time_deviation_ms =
static_cast<int64_t>(time_deviation_upper_bound_ * sqrt(_varNoise) + 0.5);
frameDelayMS = std::max(std::min(frameDelayMS, max_time_deviation_ms),
-max_time_deviation_ms);
// Cap frame_delay based on the current time deviation noise.
TimeDelta max_time_deviation =
TimeDelta::Millis(time_deviation_upper_bound_ * sqrt(var_noise_) + 0.5);
frame_delay.Clamp(-max_time_deviation, max_time_deviation);
// Only update the Kalman filter if the sample is not considered an extreme
// outlier. Even if it is an extreme outlier from a delay point of view, if
// the frame size also is large the deviation is probably due to an incorrect
// line slope.
double deviation = DeviationFromExpectedDelay(frameDelayMS, deltaFS);
double deviation = DeviationFromExpectedDelay(frame_delay, delta_frame_bytes);
if (fabs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) ||
frameSizeBytes >
_avgFrameSize + _numStdDevFrameSizeOutlier * sqrt(_varFrameSize)) {
if (fabs(deviation) < kNumStdDevDelayOutlier * sqrt(var_noise_) ||
frame_size.bytes() >
avg_frame_size_.bytes() +
kNumStdDevFrameSizeOutlier * sqrt(var_frame_size_)) {
// Update the variance of the deviation from the line given by the Kalman
// filter.
EstimateRandomJitter(deviation, incompleteFrame);
EstimateRandomJitter(deviation, incomplete_frame);
// Prevent updating with frames which have been congested by a large frame,
// and therefore arrives almost at the same time as that frame.
// This can occur when we receive a large frame (key frame) which has been
// delayed. The next frame is of normal size (delta frame), and thus deltaFS
// will be << 0. This removes all frame samples which arrives after a key
// frame.
if ((!incompleteFrame || deviation >= 0.0) &&
static_cast<double>(deltaFS) > -0.25 * _maxFrameSize) {
if ((!incomplete_frame || deviation >= 0) &&
delta_frame_bytes > -0.25 * max_frame_size_.bytes()) {
// Update the Kalman filter with the new data
KalmanEstimateChannel(frameDelayMS, deltaFS);
KalmanEstimateChannel(frame_delay, delta_frame_bytes);
}
} else {
int nStdDev =
(deviation >= 0) ? _numStdDevDelayOutlier : -_numStdDevDelayOutlier;
EstimateRandomJitter(nStdDev * sqrt(_varNoise), incompleteFrame);
(deviation >= 0) ? kNumStdDevDelayOutlier : -kNumStdDevDelayOutlier;
EstimateRandomJitter(nStdDev * sqrt(var_noise_), incomplete_frame);
}
// Post process the total estimated jitter
if (_startupCount >= kStartupDelaySamples) {
if (startup_count_ >= kStartupDelaySamples) {
PostProcessEstimate();
} else {
_startupCount++;
startup_count_++;
}
}
// Updates the nack/packet ratio.
void VCMJitterEstimator::FrameNacked() {
if (_nackCount < _nackLimit) {
_nackCount++;
if (nack_count_ < kNackLimit) {
nack_count_++;
}
_latestNackTimestamp = clock_->TimeInMicroseconds();
latest_nack_ = clock_->CurrentTime();
}
// Updates Kalman estimate of the channel.
// The caller is expected to sanity check the inputs.
void VCMJitterEstimator::KalmanEstimateChannel(int64_t frameDelayMS,
int32_t deltaFSBytes) {
void VCMJitterEstimator::KalmanEstimateChannel(TimeDelta frame_delay,
double delta_frame_size_bytes) {
double Mh[2];
double hMh_sigma;
double kalmanGain[2];
@@ -193,31 +198,31 @@ void VCMJitterEstimator::KalmanEstimateChannel(int64_t frameDelayMS,
// Prediction
// M = M + Q
_thetaCov[0][0] += _Qcov[0][0];
_thetaCov[0][1] += _Qcov[0][1];
_thetaCov[1][0] += _Qcov[1][0];
_thetaCov[1][1] += _Qcov[1][1];
theta_cov_[0][0] += q_cov_[0][0];
theta_cov_[0][1] += q_cov_[0][1];
theta_cov_[1][0] += q_cov_[1][0];
theta_cov_[1][1] += q_cov_[1][1];
// Kalman gain
// K = M*h'/(sigma2n + h*M*h') = M*h'/(1 + h*M*h')
// h = [dFS 1]
// Mh = M*h'
// hMh_sigma = h*M*h' + R
Mh[0] = _thetaCov[0][0] * deltaFSBytes + _thetaCov[0][1];
Mh[1] = _thetaCov[1][0] * deltaFSBytes + _thetaCov[1][1];
Mh[0] = theta_cov_[0][0] * delta_frame_size_bytes + theta_cov_[0][1];
Mh[1] = theta_cov_[1][0] * delta_frame_size_bytes + theta_cov_[1][1];
// sigma weights measurements with a small deltaFS as noisy and
// measurements with large deltaFS as good
if (_maxFrameSize < 1.0) {
if (max_frame_size_ < DataSize::Bytes(1)) {
return;
}
double sigma = (300.0 * exp(-fabs(static_cast<double>(deltaFSBytes)) /
(1e0 * _maxFrameSize)) +
double sigma = (300.0 * exp(-fabs(delta_frame_size_bytes) /
(1e0 * max_frame_size_.bytes())) +
1) *
sqrt(_varNoise);
sqrt(var_noise_);
if (sigma < 1.0) {
sigma = 1.0;
}
hMh_sigma = deltaFSBytes * Mh[0] + Mh[1] + sigma;
hMh_sigma = delta_frame_size_bytes * Mh[0] + Mh[1] + sigma;
if ((hMh_sigma < 1e-9 && hMh_sigma >= 0) ||
(hMh_sigma > -1e-9 && hMh_sigma <= 0)) {
RTC_DCHECK_NOTREACHED();
@@ -228,94 +233,96 @@ void VCMJitterEstimator::KalmanEstimateChannel(int64_t frameDelayMS,
// Correction
// theta = theta + K*(dT - h*theta)
measureRes = frameDelayMS - (deltaFSBytes * _theta[0] + _theta[1]);
_theta[0] += kalmanGain[0] * measureRes;
_theta[1] += kalmanGain[1] * measureRes;
measureRes =
frame_delay.ms() - (delta_frame_size_bytes * theta_[0] + theta_[1]);
theta_[0] += kalmanGain[0] * measureRes;
theta_[1] += kalmanGain[1] * measureRes;
if (_theta[0] < _thetaLow) {
_theta[0] = _thetaLow;
if (theta_[0] < kThetaLow) {
theta_[0] = kThetaLow;
}
// M = (I - K*h)*M
t00 = _thetaCov[0][0];
t01 = _thetaCov[0][1];
_thetaCov[0][0] = (1 - kalmanGain[0] * deltaFSBytes) * t00 -
kalmanGain[0] * _thetaCov[1][0];
_thetaCov[0][1] = (1 - kalmanGain[0] * deltaFSBytes) * t01 -
kalmanGain[0] * _thetaCov[1][1];
_thetaCov[1][0] = _thetaCov[1][0] * (1 - kalmanGain[1]) -
kalmanGain[1] * deltaFSBytes * t00;
_thetaCov[1][1] = _thetaCov[1][1] * (1 - kalmanGain[1]) -
kalmanGain[1] * deltaFSBytes * t01;
t00 = theta_cov_[0][0];
t01 = theta_cov_[0][1];
theta_cov_[0][0] = (1 - kalmanGain[0] * delta_frame_size_bytes) * t00 -
kalmanGain[0] * theta_cov_[1][0];
theta_cov_[0][1] = (1 - kalmanGain[0] * delta_frame_size_bytes) * t01 -
kalmanGain[0] * theta_cov_[1][1];
theta_cov_[1][0] = theta_cov_[1][0] * (1 - kalmanGain[1]) -
kalmanGain[1] * delta_frame_size_bytes * t00;
theta_cov_[1][1] = theta_cov_[1][1] * (1 - kalmanGain[1]) -
kalmanGain[1] * delta_frame_size_bytes * t01;
// Covariance matrix, must be positive semi-definite.
RTC_DCHECK(_thetaCov[0][0] + _thetaCov[1][1] >= 0 &&
_thetaCov[0][0] * _thetaCov[1][1] -
_thetaCov[0][1] * _thetaCov[1][0] >=
RTC_DCHECK(theta_cov_[0][0] + theta_cov_[1][1] >= 0 &&
theta_cov_[0][0] * theta_cov_[1][1] -
theta_cov_[0][1] * theta_cov_[1][0] >=
0 &&
_thetaCov[0][0] >= 0);
theta_cov_[0][0] >= 0);
}
// Calculate difference in delay between a sample and the expected delay
// estimated by the Kalman filter
double VCMJitterEstimator::DeviationFromExpectedDelay(
int64_t frameDelayMS,
int32_t deltaFSBytes) const {
return frameDelayMS - (_theta[0] * deltaFSBytes + _theta[1]);
TimeDelta frame_delay,
double delta_frame_size_bytes) const {
return frame_delay.ms() - (theta_[0] * delta_frame_size_bytes + theta_[1]);
}
// Estimates the random jitter by calculating the variance of the sample
// distance from the line given by theta.
void VCMJitterEstimator::EstimateRandomJitter(double d_dT,
bool incompleteFrame) {
uint64_t now = clock_->TimeInMicroseconds();
if (_lastUpdateT != -1) {
fps_counter_.AddSample(now - _lastUpdateT);
bool incomplete_frame) {
Timestamp now = clock_->CurrentTime();
if (last_update_time_.has_value()) {
fps_counter_.AddSample((now - *last_update_time_).us());
}
_lastUpdateT = now;
last_update_time_ = now;
if (_alphaCount == 0) {
if (alpha_count_ == 0) {
RTC_DCHECK_NOTREACHED();
return;
}
double alpha =
static_cast<double>(_alphaCount - 1) / static_cast<double>(_alphaCount);
_alphaCount++;
if (_alphaCount > _alphaCountMax)
_alphaCount = _alphaCountMax;
static_cast<double>(alpha_count_ - 1) / static_cast<double>(alpha_count_);
alpha_count_++;
if (alpha_count_ > kAlphaCountMax)
alpha_count_ = kAlphaCountMax;
// In order to avoid a low frame rate stream to react slower to changes,
// scale the alpha weight relative a 30 fps stream.
double fps = GetFrameRate();
if (fps > 0.0) {
double rate_scale = 30.0 / fps;
Frequency fps = GetFrameRate();
if (fps > Frequency::Zero()) {
constexpr Frequency k30Fps = Frequency::Hertz(30);
double rate_scale = k30Fps / fps;
// At startup, there can be a lot of noise in the fps estimate.
// Interpolate rate_scale linearly, from 1.0 at sample #1, to 30.0 / fps
// at sample #kStartupDelaySamples.
if (_alphaCount < kStartupDelaySamples) {
if (alpha_count_ < kStartupDelaySamples) {
rate_scale =
(_alphaCount * rate_scale + (kStartupDelaySamples - _alphaCount)) /
(alpha_count_ * rate_scale + (kStartupDelaySamples - alpha_count_)) /
kStartupDelaySamples;
}
alpha = pow(alpha, rate_scale);
}
double avgNoise = alpha * _avgNoise + (1 - alpha) * d_dT;
double varNoise =
alpha * _varNoise + (1 - alpha) * (d_dT - _avgNoise) * (d_dT - _avgNoise);
if (!incompleteFrame || varNoise > _varNoise) {
_avgNoise = avgNoise;
_varNoise = varNoise;
double avgNoise = alpha * avg_noise_ + (1 - alpha) * d_dT;
double varNoise = alpha * var_noise_ +
(1 - alpha) * (d_dT - avg_noise_) * (d_dT - avg_noise_);
if (!incomplete_frame || varNoise > var_noise_) {
avg_noise_ = avgNoise;
var_noise_ = varNoise;
}
if (_varNoise < 1.0) {
if (var_noise_ < 1.0) {
// The variance should never be zero, since we might get stuck and consider
// all samples as outliers.
_varNoise = 1.0;
var_noise_ = 1.0;
}
}
double VCMJitterEstimator::NoiseThreshold() const {
double noiseThreshold = _noiseStdDevs * sqrt(_varNoise) - _noiseStdDevOffset;
double noiseThreshold = kNoiseStdDevs * sqrt(var_noise_) - kNoiseStdDevOffset;
if (noiseThreshold < 1.0) {
noiseThreshold = 1.0;
}
@@ -323,88 +330,91 @@ double VCMJitterEstimator::NoiseThreshold() const {
}
// Calculates the current jitter estimate from the filtered estimates.
double VCMJitterEstimator::CalculateEstimate() {
double ret = _theta[0] * (_maxFrameSize - _avgFrameSize) + NoiseThreshold();
TimeDelta VCMJitterEstimator::CalculateEstimate() {
double retMs =
theta_[0] * (max_frame_size_.bytes() - avg_frame_size_.bytes()) +
NoiseThreshold();
TimeDelta ret = TimeDelta::Millis(retMs);
constexpr TimeDelta kMinPrevEstimate = TimeDelta::Micros(10);
constexpr TimeDelta kMaxEstimate = TimeDelta::Seconds(10);
// A very low estimate (or negative) is neglected.
if (ret < 1.0) {
if (_prevEstimate <= 0.01) {
ret = 1.0;
if (ret < TimeDelta::Millis(1)) {
if (!prev_estimate_ || prev_estimate_ <= kMinPrevEstimate) {
ret = TimeDelta::Millis(1);
} else {
ret = _prevEstimate;
ret = *prev_estimate_;
}
}
if (ret > 10000.0) { // Sanity
ret = 10000.0;
if (ret > kMaxEstimate) { // Sanity
ret = kMaxEstimate;
}
_prevEstimate = ret;
prev_estimate_ = ret;
return ret;
}
void VCMJitterEstimator::PostProcessEstimate() {
_filterJitterEstimate = CalculateEstimate();
filter_jitter_estimate_ = CalculateEstimate();
}
void VCMJitterEstimator::UpdateRtt(int64_t rttMs) {
_rttFilter.Update(TimeDelta::Millis(rttMs));
void VCMJitterEstimator::UpdateRtt(TimeDelta rtt) {
rtt_filter_.Update(rtt);
}
// Returns the current filtered estimate if available,
// otherwise tries to calculate an estimate.
int VCMJitterEstimator::GetJitterEstimate(
double rttMultiplier,
absl::optional<double> rttMultAddCapMs) {
double jitterMS = CalculateEstimate() + OPERATING_SYSTEM_JITTER;
uint64_t now = clock_->TimeInMicroseconds();
TimeDelta VCMJitterEstimator::GetJitterEstimate(
double rtt_multiplier,
absl::optional<TimeDelta> rtt_mult_add_cap) {
TimeDelta jitter = CalculateEstimate() + OPERATING_SYSTEM_JITTER;
Timestamp now = clock_->CurrentTime();
if (now - _latestNackTimestamp > kNackCountTimeoutMs * 1000)
_nackCount = 0;
if (now - latest_nack_ > kNackCountTimeout)
nack_count_ = 0;
if (_filterJitterEstimate > jitterMS)
jitterMS = _filterJitterEstimate;
if (_nackCount >= _nackLimit) {
if (rttMultAddCapMs.has_value()) {
jitterMS += std::min(_rttFilter.Rtt().ms() * rttMultiplier,
rttMultAddCapMs.value());
if (filter_jitter_estimate_ > jitter)
jitter = filter_jitter_estimate_;
if (nack_count_ >= kNackLimit) {
if (rtt_mult_add_cap.has_value()) {
jitter += std::min(rtt_filter_.Rtt() * rtt_multiplier,
rtt_mult_add_cap.value());
} else {
jitterMS += _rttFilter.Rtt().ms() * rttMultiplier;
jitter += rtt_filter_.Rtt() * rtt_multiplier;
}
}
if (enable_reduced_delay_) {
static const double kJitterScaleLowThreshold = 5.0;
static const double kJitterScaleHighThreshold = 10.0;
double fps = GetFrameRate();
static const Frequency kJitterScaleLowThreshold = Frequency::Hertz(5);
static const Frequency kJitterScaleHighThreshold = Frequency::Hertz(10);
Frequency fps = GetFrameRate();
// Ignore jitter for very low fps streams.
if (fps < kJitterScaleLowThreshold) {
if (fps == 0.0) {
return rtc::checked_cast<int>(std::max(0.0, jitterMS) + 0.5);
if (fps.IsZero()) {
return std::max(TimeDelta::Zero(), jitter);
}
return 0;
return TimeDelta::Zero();
}
// Semi-low frame rate; scale by factor linearly interpolated from 0.0 at
// kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold.
if (fps < kJitterScaleHighThreshold) {
jitterMS =
(1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) *
(fps - kJitterScaleLowThreshold) * jitterMS;
jitter = (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) *
(fps - kJitterScaleLowThreshold) * jitter;
}
}
return rtc::checked_cast<int>(std::max(0.0, jitterMS) + 0.5);
return std::max(TimeDelta::Zero(), jitter);
}
double VCMJitterEstimator::GetFrameRate() const {
if (fps_counter_.ComputeMean() <= 0.0)
return 0;
Frequency VCMJitterEstimator::GetFrameRate() const {
TimeDelta mean_frame_period = TimeDelta::Micros(fps_counter_.ComputeMean());
if (mean_frame_period <= TimeDelta::Zero())
return Frequency::Zero();
double fps = 1000000.0 / fps_counter_.ComputeMean();
Frequency fps = 1 / mean_frame_period;
// Sanity check.
RTC_DCHECK_GE(fps, 0.0);
if (fps > kMaxFramerateEstimate) {
fps = kMaxFramerateEstimate;
}
return fps;
RTC_DCHECK_GE(fps, Frequency::Zero());
return std::min(fps, kMaxFramerateEstimate);
}
} // namespace webrtc
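As a reading aid only (this restates the existing code above and is not part of the commit): writing theta_ = (θ0, θ1), var_noise_ = σ², avg_frame_size_ = FS_avg and max_frame_size_ = FS_max, the Kalman filter models the per-frame delay delta as a linear function of the frame-size delta,

\[ d_i = \theta_0 \,\Delta\mathrm{FS}_i + \theta_1 + n_i, \qquad \operatorname{Var}(n_i) = \sigma^2, \]

and CalculateEstimate() returns, up to the sanity bounds in the code above,

\[ \hat{J} = \theta_0 \,(\mathrm{FS}_{\max} - \mathrm{FS}_{\mathrm{avg}}) + \max(2.33\,\sigma - 30,\; 1)\ \text{ms}, \]

to which GetJitterEstimate() adds the 10 ms OPERATING_SYSTEM_JITTER constant and, once kNackLimit NACKs have been seen, an RTT-dependent term. None of this arithmetic changes in the CL; only the types carrying it do.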


@@ -11,6 +11,11 @@
#ifndef MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
#define MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
#include "absl/types/optional.h"
#include "api/units/data_size.h"
#include "api/units/frequency.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "modules/video_coding/rtt_filter.h"
#include "rtc_base/rolling_accumulator.h"
@@ -31,24 +36,25 @@ class VCMJitterEstimator {
// Updates the jitter estimate with the new data.
//
// Input:
// - frameDelay : Delay-delta calculated by UTILDelayEstimate in
// milliseconds.
// - frameSize : Frame size of the current frame.
// - incompleteFrame : Flags if the frame is used to update the
// - frame_delay : Delay-delta calculated by UTILDelayEstimate.
// - frame_size : Frame size of the current frame.
// - incomplete_frame : Flags if the frame is used to update the
// estimate before it was complete.
// Default is false.
void UpdateEstimate(int64_t frameDelayMS,
uint32_t frameSizeBytes,
bool incompleteFrame = false);
void UpdateEstimate(TimeDelta frame_delay,
DataSize frame_size,
bool incomplete_frame = false);
// Returns the current jitter estimate in milliseconds and adds an RTT
// dependent term in cases of retransmission.
// Returns the current jitter estimate and adds an RTT dependent term in cases
// of retransmission.
// Input:
// - rttMultiplier : RTT param multiplier (when applicable).
// - rtt_multiplier : RTT param multiplier (when applicable).
// - rtt_mult_add_cap : Multiplier cap from the RTTMultExperiment.
//
// Return value : Jitter estimate in milliseconds.
virtual int GetJitterEstimate(double rttMultiplier,
absl::optional<double> rttMultAddCapMs);
// Return value : Jitter estimate.
virtual TimeDelta GetJitterEstimate(
double rtt_multiplier,
absl::optional<TimeDelta> rtt_mult_add_cap);
// Updates the nack counter.
void FrameNacked();
@@ -56,45 +62,47 @@ class VCMJitterEstimator {
// Updates the RTT filter.
//
// Input:
// - rttMs : RTT in ms.
void UpdateRtt(int64_t rttMs);
// - rtt : Round trip time.
void UpdateRtt(TimeDelta rtt);
// A constant describing the delay from the jitter buffer to the delay on the
// receiving side which is not accounted for by the jitter buffer nor the
// decoding delay estimate.
static const uint32_t OPERATING_SYSTEM_JITTER = 10;
static constexpr TimeDelta OPERATING_SYSTEM_JITTER = TimeDelta::Millis(10);
protected:
// These are protected for better testing possibilities.
double _theta[2]; // Estimated line parameters (slope, offset)
double _varNoise; // Variance of the time-deviation from the line
double theta_[2]; // Estimated line parameters (slope, offset)
double var_noise_; // Variance of the time-deviation from the line
private:
// Updates the Kalman filter for the line describing the frame size dependent
// jitter.
//
// Input:
// - frameDelayMS : Delay-delta calculated by UTILDelayEstimate in
// milliseconds.
// - deltaFSBytes : Frame size delta, i.e. frame size at time T
// : minus frame size at time T-1.
void KalmanEstimateChannel(int64_t frameDelayMS, int32_t deltaFSBytes);
// - frame_delay
// Delay-delta calculated by UTILDelayEstimate.
// - delta_frame_size_bytes
// Frame size delta, i.e. frame size at time T minus frame size
// at time T-1.
void KalmanEstimateChannel(TimeDelta frame_delay,
double delta_frame_size_bytes);
// Updates the random jitter estimate, i.e. the variance of the time
// deviations from the line given by the Kalman filter.
//
// Input:
// - d_dT : The deviation from the kalman estimate.
// - incompleteFrame : True if the frame used to update the
// - incomplete_frame : True if the frame used to update the
// estimate with was incomplete.
void EstimateRandomJitter(double d_dT, bool incompleteFrame);
void EstimateRandomJitter(double d_dT, bool incomplete_frame);
double NoiseThreshold() const;
// Calculates the current jitter estimate.
//
// Return value : The current jitter estimate in milliseconds.
double CalculateEstimate();
// Return value : The current jitter estimate.
TimeDelta CalculateEstimate();
// Post process the calculated estimate.
void PostProcessEstimate();
@@ -103,52 +111,48 @@ class VCMJitterEstimator {
// estimated by the Kalman filter.
//
// Input:
// - frameDelayMS : Delay-delta calculated by UTILDelayEstimate in
// milliseconds.
// - deltaFS : Frame size delta, i.e. frame size at time
// T minus frame size at time T-1.
// - frame_delay : Delay-delta calculated by UTILDelayEstimate.
// - delta_frame_size_bytes : Frame size delta, i.e. frame size at
// time
// T minus frame size at time T-1.
//
// Return value : The difference in milliseconds.
double DeviationFromExpectedDelay(int64_t frameDelayMS,
int32_t deltaFSBytes) const;
// Return value : The delay difference in ms.
double DeviationFromExpectedDelay(TimeDelta frame_delay,
double delta_frame_size_bytes) const;
double GetFrameRate() const;
Frequency GetFrameRate() const;
// Constants, filter parameters.
const double _phi;
const double _psi;
const uint32_t _alphaCountMax;
const double _thetaLow;
const uint32_t _nackLimit;
const int32_t _numStdDevDelayOutlier;
const int32_t _numStdDevFrameSizeOutlier;
const double _noiseStdDevs;
const double _noiseStdDevOffset;
double theta_cov_[2][2]; // Estimate covariance
double q_cov_[2][2]; // Process noise covariance
double _thetaCov[2][2]; // Estimate covariance
double _Qcov[2][2]; // Process noise covariance
double _avgFrameSize; // Average frame size
double _varFrameSize; // Frame size variance
double _maxFrameSize; // Largest frame size received (descending
// with a factor _psi)
uint32_t _fsSum;
uint32_t _fsCount;
static constexpr DataSize kDefaultAvgAndMaxFrameSize = DataSize::Bytes(500);
DataSize avg_frame_size_ = kDefaultAvgAndMaxFrameSize; // Average frame size
double var_frame_size_; // Frame size variance. Unit is bytes^2.
// Largest frame size received (descending with a factor kPsi)
DataSize max_frame_size_ = kDefaultAvgAndMaxFrameSize;
DataSize frame_size_sum_ = DataSize::Zero();
uint32_t frame_size_count_;
int64_t _lastUpdateT;
double _prevEstimate; // The previously returned jitter estimate
uint32_t _prevFrameSize; // Frame size of the previous frame
double _avgNoise; // Average of the random jitter
uint32_t _alphaCount;
double _filterJitterEstimate; // The filtered sum of jitter estimates
absl::optional<Timestamp> last_update_time_;
// The previously returned jitter estimate
absl::optional<TimeDelta> prev_estimate_;
// Frame size of the previous frame
absl::optional<DataSize> prev_frame_size_;
// Average of the random jitter
double avg_noise_;
uint32_t alpha_count_;
// The filtered sum of jitter estimates
TimeDelta filter_jitter_estimate_ = TimeDelta::Zero();
uint32_t _startupCount;
int64_t
_latestNackTimestamp; // Timestamp in ms when the latest nack was seen
uint32_t _nackCount; // Keeps track of the number of nacks received,
// but never goes above _nackLimit
VCMRttFilter _rttFilter;
uint32_t startup_count_;
// Time when the latest nack was seen
Timestamp latest_nack_ = Timestamp::Zero();
// Keeps track of the number of nacks received, but never goes above
// kNackLimit.
uint32_t nack_count_;
VCMRttFilter rtt_filter_;
// Tracks frame rates in microseconds.
rtc::RollingAccumulator<uint64_t> fps_counter_;
const double time_deviation_upper_bound_;
const bool enable_reduced_delay_;


@@ -14,6 +14,9 @@
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/units/data_size.h"
#include "api/units/frequency.h"
#include "api/units/time_delta.h"
#include "modules/video_coding/jitter_estimator.h"
#include "rtc_base/experiments/jitter_upper_bound_experiment.h"
#include "rtc_base/numerics/histogram_percentile_counter.h"
@@ -33,10 +36,6 @@ class TestVCMJitterEstimator : public ::testing::Test {
estimator_ = std::make_unique<VCMJitterEstimator>(&fake_clock_);
}
void AdvanceClock(int64_t microseconds) {
fake_clock_.AdvanceTimeMicroseconds(microseconds);
}
SimulatedClock fake_clock_;
std::unique_ptr<VCMJitterEstimator> estimator_;
};
@@ -46,11 +45,16 @@ class ValueGenerator {
public:
explicit ValueGenerator(int32_t amplitude)
: amplitude_(amplitude), counter_(0) {}
virtual ~ValueGenerator() {}
int64_t Delay() const { return ((counter_ % 11) - 5) * amplitude_; }
virtual ~ValueGenerator() = default;
uint32_t FrameSize() const { return 1000 + Delay(); }
TimeDelta Delay() const {
return TimeDelta::Millis((counter_ % 11) - 5) * amplitude_;
}
DataSize FrameSize() const {
return DataSize::Bytes(1000 + Delay().ms() / 5);
}
void Advance() { ++counter_; }
@@ -62,12 +66,13 @@
// 5 fps, disable jitter delay altogether.
TEST_F(TestVCMJitterEstimator, TestLowRate) {
ValueGenerator gen(10);
uint64_t time_delta_us = rtc::kNumMicrosecsPerSec / 5;
TimeDelta time_delta = 1 / Frequency::Hertz(5);
for (int i = 0; i < 60; ++i) {
estimator_->UpdateEstimate(gen.Delay(), gen.FrameSize());
AdvanceClock(time_delta_us);
fake_clock_.AdvanceTime(time_delta);
if (i > 2)
EXPECT_EQ(estimator_->GetJitterEstimate(0, absl::nullopt), 0);
EXPECT_EQ(estimator_->GetJitterEstimate(0, absl::nullopt),
TimeDelta::Zero());
gen.Advance();
}
}
@@ -78,12 +83,13 @@ TEST_F(TestVCMJitterEstimator, TestLowRateDisabled) {
SetUp();
ValueGenerator gen(10);
uint64_t time_delta_us = rtc::kNumMicrosecsPerSec / 5;
TimeDelta time_delta = 1 / Frequency::Hertz(5);
for (int i = 0; i < 60; ++i) {
estimator_->UpdateEstimate(gen.Delay(), gen.FrameSize());
AdvanceClock(time_delta_us);
fake_clock_.AdvanceTime(time_delta);
if (i > 2)
EXPECT_GT(estimator_->GetJitterEstimate(0, absl::nullopt), 0);
EXPECT_GT(estimator_->GetJitterEstimate(0, absl::nullopt),
TimeDelta::Zero());
gen.Advance();
}
}
@@ -97,7 +103,7 @@ TEST_F(TestVCMJitterEstimator, TestUpperBound) {
percentiles(1000) {}
double upper_bound;
double rtt_mult;
absl::optional<double> rtt_mult_add_cap_ms;
absl::optional<TimeDelta> rtt_mult_add_cap_ms;
rtc::HistogramPercentileCounter percentiles;
};
std::vector<TestContext> test_cases(4);
@@ -113,11 +119,11 @@ TEST_F(TestVCMJitterEstimator, TestUpperBound) {
// Large upper bound, rtt_mult = 1, and large rtt_mult addition cap value.
test_cases[2].upper_bound = 1000.0;
test_cases[2].rtt_mult = 1.0;
test_cases[2].rtt_mult_add_cap_ms = 200.0;
test_cases[2].rtt_mult_add_cap_ms = TimeDelta::Millis(200);
// Large upper bound, rtt_mult = 1, and small rtt_mult addition cap value.
test_cases[3].upper_bound = 1000.0;
test_cases[3].rtt_mult = 1.0;
test_cases[3].rtt_mult_add_cap_ms = 10.0;
test_cases[3].rtt_mult_add_cap_ms = TimeDelta::Millis(10);
// Test jitter buffer upper_bound and rtt_mult addition cap sizes.
for (TestContext& context : test_cases) {
@@ -130,16 +136,17 @@ TEST_F(TestVCMJitterEstimator, TestUpperBound) {
SetUp();
ValueGenerator gen(50);
uint64_t time_delta_us = rtc::kNumMicrosecsPerSec / 30;
constexpr int64_t kRttMs = 250;
TimeDelta time_delta = 1 / Frequency::Hertz(30);
constexpr TimeDelta kRtt = TimeDelta::Millis(250);
for (int i = 0; i < 100; ++i) {
estimator_->UpdateEstimate(gen.Delay(), gen.FrameSize());
AdvanceClock(time_delta_us);
fake_clock_.AdvanceTime(time_delta);
estimator_->FrameNacked(); // To test rtt_mult.
estimator_->UpdateRtt(kRttMs); // To test rtt_mult.
estimator_->UpdateRtt(kRtt); // To test rtt_mult.
context.percentiles.Add(
static_cast<uint32_t>(estimator_->GetJitterEstimate(
context.rtt_mult, context.rtt_mult_add_cap_ms)));
estimator_
->GetJitterEstimate(context.rtt_mult, context.rtt_mult_add_cap_ms)
.ms());
gen.Advance();
}
}


@@ -100,6 +100,7 @@ rtc_library("video") {
"../modules/video_coding:codec_globals_headers",
"../modules/video_coding:frame_buffer",
"../modules/video_coding:frame_helpers",
"../modules/video_coding:jitter_estimator",
"../modules/video_coding:nack_requester",
"../modules/video_coding:packet_buffer",
"../modules/video_coding:timing",
@@ -308,6 +309,7 @@ rtc_library("frame_buffer_proxy") {
"../api:sequence_checker",
"../api/metronome",
"../api/task_queue",
"../api/units:data_size",
"../api/video:encoded_frame",
"../api/video:video_rtp_headers",
"../modules/video_coding",


@@ -17,6 +17,7 @@
#include "absl/base/attributes.h"
#include "absl/functional/bind_front.h"
#include "api/sequence_checker.h"
#include "api/units/data_size.h"
#include "api/video/encoded_frame.h"
#include "api/video/video_content_type.h"
#include "modules/video_coding/frame_buffer2.h"
@@ -261,7 +262,7 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
void UpdateRtt(int64_t max_rtt_ms) override {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
jitter_estimator_.UpdateRtt(max_rtt_ms);
jitter_estimator_.UpdateRtt(TimeDelta::Millis(max_rtt_ms));
}
void StartNextDecode(bool keyframe_required) override {
@@ -298,7 +299,7 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
Timestamp now = clock_->CurrentTime();
bool superframe_delayed_by_retransmission = false;
size_t superframe_size = 0;
DataSize superframe_size = DataSize::Zero();
const EncodedFrame& first_frame = *frames.front();
int64_t receive_time_ms = first_frame.ReceivedTime();
@@ -319,7 +320,7 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
superframe_delayed_by_retransmission |=
frame->delayed_by_retransmission();
receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime());
superframe_size += frame->size();
superframe_size += DataSize::Bytes(frame->size());
}
if (!superframe_delayed_by_retransmission) {
@@ -327,17 +328,19 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
if (inter_frame_delay_.CalculateDelay(first_frame.Timestamp(),
&frame_delay, receive_time_ms)) {
jitter_estimator_.UpdateEstimate(frame_delay, superframe_size);
jitter_estimator_.UpdateEstimate(TimeDelta::Millis(frame_delay),
superframe_size);
}
float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
absl::optional<float> rtt_mult_add_cap_ms = absl::nullopt;
absl::optional<TimeDelta> rtt_mult_add_cap_ms = absl::nullopt;
if (rtt_mult_settings_.has_value()) {
rtt_mult = rtt_mult_settings_->rtt_mult_setting;
rtt_mult_add_cap_ms = rtt_mult_settings_->rtt_mult_add_cap_ms;
rtt_mult_add_cap_ms =
TimeDelta::Millis(rtt_mult_settings_->rtt_mult_add_cap_ms);
}
timing_->SetJitterDelay(TimeDelta::Millis(
jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms)));
timing_->SetJitterDelay(
jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms));
timing_->UpdateCurrentDelay(render_time, now);
} else if (RttMultExperiment::RttMultEnabled()) {
jitter_estimator_.FrameNacked();


@@ -692,6 +692,7 @@ void VideoReceiveStream2::OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) {
void VideoReceiveStream2::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
// TODO(bugs.webrtc.org/13757): Replace with TimeDelta.
frame_buffer_->UpdateRtt(max_rtt_ms);
rtp_video_stream_receiver_.UpdateRtt(max_rtt_ms);
stats_proxy_.OnRttUpdate(avg_rtt_ms);