Use Timestamp and TimeDelta in VCMTiming

* Switches TimestampExtrapolator to use Timestamp as well.

Bug: webrtc:13589
Change-Id: I042be5d693068553d2e8eb92fa532092d77bd7ef
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/249993
Reviewed-by: Tomas Gunnarsson <tommi@webrtc.org>
Commit-Queue: Evan Shrubsole <eshr@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#36112}
Author: Evan Shrubsole
Date: 2022-03-02 15:13:55 +01:00
Committed by: WebRTC LUCI CQ
Parent: 9558ab41eb
Commit: d6cdf80072
26 changed files with 527 additions and 500 deletions
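
A minimal sketch of the conversion pattern this change applies, assuming only the api/units and system_wrappers headers already referenced in the diff; `SomeTimingUser` is a hypothetical caller, not code from this commit:

#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "system_wrappers/include/clock.h"

namespace webrtc {

// Illustrative only: raw int64_t millisecond values become Timestamp and
// TimeDelta so that time arithmetic carries explicit units.
void SomeTimingUser(Clock* clock) {
  // Before: int64_t now_ms = clock->TimeInMilliseconds();
  Timestamp now = clock->CurrentTime();
  // Durations are TimeDelta instead of plain integers.
  TimeDelta jitter_delay = TimeDelta::Millis(20);
  // Timestamp + TimeDelta yields a Timestamp; Timestamp - Timestamp a TimeDelta.
  Timestamp render_time = now + jitter_delay;
  TimeDelta wait = render_time - now;  // Equals jitter_delay.
  (void)wait;
}

}  // namespace webrtc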

@ -149,7 +149,11 @@ rtc_library("encoded_frame") {
"encoded_frame.h",
]
deps = [ "../../modules/video_coding:encoded_frame" ]
deps = [
"../../modules/video_coding:encoded_frame",
"../units:timestamp",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtp_video_frame_assembler") {

@ -10,8 +10,16 @@
#include "api/video/encoded_frame.h"
#include "absl/types/optional.h"
namespace webrtc {
absl::optional<Timestamp> EncodedFrame::RenderTimestamp() const {
return RenderTimeMs() >= 0
? absl::make_optional(Timestamp::Millis(RenderTimeMs()))
: absl::nullopt;
}
bool EncodedFrame::delayed_by_retransmission() const {
return false;
}

@ -14,6 +14,8 @@
#include <stddef.h>
#include <stdint.h>
#include "absl/types/optional.h"
#include "api/units/timestamp.h"
#include "modules/video_coding/encoded_frame.h"
namespace webrtc {
@ -30,10 +32,15 @@ class EncodedFrame : public webrtc::VCMEncodedFrame {
virtual ~EncodedFrame() {}
// When this frame was received.
// TODO(bugs.webrtc.org/13756): Use Timestamp instead of int.
virtual int64_t ReceivedTime() const = 0;
// When this frame should be rendered.
// TODO(bugs.webrtc.org/13756): Use Timestamp instead of int.
virtual int64_t RenderTime() const = 0;
// Returns a Timestamp from `RenderTime`, or nullopt if there is no
// render time.
absl::optional<webrtc::Timestamp> RenderTimestamp() const;
// This information is currently needed by the timing calculation class.
// TODO(philipel): Remove this function when a new timing class has

@ -190,6 +190,7 @@ rtc_library("timing") {
deps = [
"../../api/units:time_delta",
"../../api/video:video_rtp_headers",
"../../rtc_base:logging",
"../../rtc_base:macromagic",
"../../rtc_base:rtc_numerics",
"../../rtc_base/experiments:field_trial_parser",
@ -1144,7 +1145,9 @@ if (rtc_include_tests) {
"../../api:videocodec_test_fixture_api",
"../../api/task_queue:default_task_queue_factory",
"../../api/test/video:function_video_factory",
"../../api/units:frequency",
"../../api/units:time_delta",
"../../api/units:timestamp",
"../../api/video:builtin_video_bitrate_allocator_factory",
"../../api/video:encoded_frame",
"../../api/video:render_resolution",

@ -104,7 +104,7 @@ void FrameBuffer::NextFrame(int64_t max_wait_time_ms,
void FrameBuffer::StartWaitForNextFrameOnQueue() {
RTC_DCHECK(callback_queue_);
RTC_DCHECK(!callback_task_.Running());
int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds());
int64_t wait_ms = FindNextFrame(clock_->CurrentTime());
callback_task_ = RepeatingTaskHandle::DelayedStart(
callback_queue_->Get(), TimeDelta::Millis(wait_ms),
[this] {
@ -118,13 +118,12 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() {
if (!frames_to_decode_.empty()) {
// We have frames, deliver!
frame = GetNextFrame();
timing_->SetLastDecodeScheduledTimestamp(
clock_->TimeInMilliseconds());
timing_->SetLastDecodeScheduledTimestamp(clock_->CurrentTime());
} else if (clock_->TimeInMilliseconds() < latest_return_time_ms_) {
// If there's no frames to decode and there is still time left, it
// means that the frame buffer was cleared between creation and
// execution of this task. Continue waiting for the remaining time.
int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds());
int64_t wait_ms = FindNextFrame(clock_->CurrentTime());
return TimeDelta::Millis(wait_ms);
}
frame_handler = std::move(frame_handler_);
@ -137,8 +136,8 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() {
TaskQueueBase::DelayPrecision::kHigh);
}
int64_t FrameBuffer::FindNextFrame(int64_t now_ms) {
int64_t wait_ms = latest_return_time_ms_ - now_ms;
int64_t FrameBuffer::FindNextFrame(Timestamp now) {
int64_t wait_ms = latest_return_time_ms_ - now.ms();
frames_to_decode_.clear();
// `last_continuous_frame_` may be empty below, but nullopt is smaller
@ -217,14 +216,16 @@ int64_t FrameBuffer::FindNextFrame(int64_t now_ms) {
frames_to_decode_ = std::move(current_superframe);
if (frame->RenderTime() == -1) {
frame->SetRenderTime(timing_->RenderTimeMs(frame->Timestamp(), now_ms));
absl::optional<Timestamp> render_time = frame->RenderTimestamp();
if (!render_time) {
render_time = timing_->RenderTime(frame->Timestamp(), now);
frame->SetRenderTime(render_time->ms());
}
bool too_many_frames_queued =
frames_.size() > zero_playout_delay_max_decode_queue_size_ ? true
: false;
wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms,
too_many_frames_queued);
wait_ms =
timing_->MaxWaitingTime(*render_time, now, too_many_frames_queued).ms();
// This will cause the frame buffer to prefer high framerate rather
// than high resolution in the case of the decoder not decoding fast
@ -236,14 +237,14 @@ int64_t FrameBuffer::FindNextFrame(int64_t now_ms) {
break;
}
wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms_ - now_ms);
wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms_ - now.ms());
wait_ms = std::max<int64_t>(wait_ms, 0);
return wait_ms;
}
std::unique_ptr<EncodedFrame> FrameBuffer::GetNextFrame() {
RTC_DCHECK_RUN_ON(&callback_checker_);
int64_t now_ms = clock_->TimeInMilliseconds();
Timestamp now = clock_->CurrentTime();
// TODO(ilnik): remove `frames_out` use frames_to_decode_ directly.
std::vector<std::unique_ptr<EncodedFrame>> frames_out;
@ -251,21 +252,21 @@ std::unique_ptr<EncodedFrame> FrameBuffer::GetNextFrame() {
bool superframe_delayed_by_retransmission = false;
size_t superframe_size = 0;
const EncodedFrame& first_frame = *frames_to_decode_[0]->second.frame;
int64_t render_time_ms = first_frame.RenderTime();
absl::optional<Timestamp> render_time = first_frame.RenderTimestamp();
int64_t receive_time_ms = first_frame.ReceivedTime();
// Gracefully handle bad RTP timestamps and render time issues.
if (FrameHasBadRenderTiming(first_frame.RenderTimeMs(), now_ms,
timing_->TargetVideoDelay())) {
if (!render_time ||
FrameHasBadRenderTiming(*render_time, now, timing_->TargetVideoDelay())) {
jitter_estimator_.Reset();
timing_->Reset();
render_time_ms = timing_->RenderTimeMs(first_frame.Timestamp(), now_ms);
render_time = timing_->RenderTime(first_frame.Timestamp(), now);
}
for (FrameMap::iterator& frame_it : frames_to_decode_) {
RTC_DCHECK(frame_it != frames_.end());
std::unique_ptr<EncodedFrame> frame = std::move(frame_it->second.frame);
frame->SetRenderTime(render_time_ms);
frame->SetRenderTime(render_time->ms());
superframe_delayed_by_retransmission |= frame->delayed_by_retransmission();
receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime());
@ -305,9 +306,9 @@ std::unique_ptr<EncodedFrame> FrameBuffer::GetNextFrame() {
rtt_mult = rtt_mult_settings_->rtt_mult_setting;
rtt_mult_add_cap_ms = rtt_mult_settings_->rtt_mult_add_cap_ms;
}
timing_->SetJitterDelay(
jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms));
timing_->UpdateCurrentDelay(render_time_ms, now_ms);
timing_->SetJitterDelay(TimeDelta::Millis(
jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms)));
timing_->UpdateCurrentDelay(*render_time, now);
} else {
if (RttMultExperiment::RttMultEnabled())
jitter_estimator_.FrameNacked();
@ -446,8 +447,10 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
return last_continuous_frame_id;
if (!frame->delayed_by_retransmission())
timing_->IncomingTimestamp(frame->Timestamp(), frame->ReceivedTime());
// If ReceiveTime is negative then it is not a valid timestamp.
if (!frame->delayed_by_retransmission() && frame->ReceivedTime() >= 0)
timing_->IncomingTimestamp(frame->Timestamp(),
Timestamp::Millis(frame->ReceivedTime()));
// It can happen that a frame will be reported as fully received even if a
// lower spatial layer frame is missing.
@ -591,18 +594,17 @@ void FrameBuffer::UpdateJitterDelay() {
if (!stats_callback_)
return;
int max_decode_ms;
int current_delay_ms;
int target_delay_ms;
int jitter_buffer_ms;
int min_playout_delay_ms;
int render_delay_ms;
if (timing_->GetTimings(&max_decode_ms, &current_delay_ms, &target_delay_ms,
&jitter_buffer_ms, &min_playout_delay_ms,
&render_delay_ms)) {
TimeDelta max_decode = TimeDelta::Zero();
TimeDelta current_delay = TimeDelta::Zero();
TimeDelta target_delay = TimeDelta::Zero();
TimeDelta jitter_buffer = TimeDelta::Zero();
TimeDelta min_playout_delay = TimeDelta::Zero();
TimeDelta render_delay = TimeDelta::Zero();
if (timing_->GetTimings(&max_decode, &current_delay, &target_delay,
&jitter_buffer, &min_playout_delay, &render_delay)) {
stats_callback_->OnFrameBufferTimingsUpdated(
max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms,
min_playout_delay_ms, render_delay_ms);
max_decode.ms(), current_delay.ms(), target_delay.ms(),
jitter_buffer.ms(), min_playout_delay.ms(), render_delay.ms());
}
}

@ -118,7 +118,7 @@ class FrameBuffer {
// Check that the references of `frame` are valid.
bool ValidReferences(const EncodedFrame& frame) const;
int64_t FindNextFrame(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
int64_t FindNextFrame(Timestamp now) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
std::unique_ptr<EncodedFrame> GetNextFrame()
RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);

@ -16,6 +16,8 @@
#include <memory>
#include <vector>
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "modules/video_coding/frame_object.h"
#include "modules/video_coding/jitter_estimator.h"
#include "modules/video_coding/timing.h"
@ -40,56 +42,55 @@ class VCMTimingFake : public VCMTiming {
public:
explicit VCMTimingFake(Clock* clock) : VCMTiming(clock) {}
int64_t RenderTimeMs(uint32_t frame_timestamp,
int64_t now_ms) const override {
if (last_ms_ == -1) {
last_ms_ = now_ms + kDelayMs;
Timestamp RenderTime(uint32_t frame_timestamp, Timestamp now) const override {
if (last_render_time_.IsMinusInfinity()) {
last_render_time_ = now + kDelay;
last_timestamp_ = frame_timestamp;
}
uint32_t diff = MinDiff(frame_timestamp, last_timestamp_);
auto diff = MinDiff(frame_timestamp, last_timestamp_);
auto timeDiff = TimeDelta::Millis(diff / 90);
if (AheadOf(frame_timestamp, last_timestamp_))
last_ms_ += diff / 90;
last_render_time_ += timeDiff;
else
last_ms_ -= diff / 90;
last_render_time_ -= timeDiff;
last_timestamp_ = frame_timestamp;
return last_ms_;
return last_render_time_;
}
int64_t MaxWaitingTime(int64_t render_time_ms,
int64_t now_ms,
bool too_many_frames_queued) const override {
return render_time_ms - now_ms - kDecodeTime;
TimeDelta MaxWaitingTime(Timestamp render_time,
Timestamp now,
bool too_many_frames_queued) const override {
return render_time - now - kDecodeTime;
}
bool GetTimings(int* max_decode_ms,
int* current_delay_ms,
int* target_delay_ms,
int* jitter_buffer_ms,
int* min_playout_delay_ms,
int* render_delay_ms) const override {
bool GetTimings(TimeDelta* max_decode,
TimeDelta* current_delay,
TimeDelta* target_delay,
TimeDelta* jitter_buffer,
TimeDelta* min_playout_delay,
TimeDelta* render_delay) const override {
return true;
}
int GetCurrentJitter() {
int max_decode_ms;
int current_delay_ms;
int target_delay_ms;
int jitter_buffer_ms;
int min_playout_delay_ms;
int render_delay_ms;
VCMTiming::GetTimings(&max_decode_ms, &current_delay_ms, &target_delay_ms,
&jitter_buffer_ms, &min_playout_delay_ms,
&render_delay_ms);
return jitter_buffer_ms;
TimeDelta GetCurrentJitter() {
TimeDelta max_decode = TimeDelta::Zero();
TimeDelta current_delay = TimeDelta::Zero();
TimeDelta target_delay = TimeDelta::Zero();
TimeDelta jitter_buffer = TimeDelta::Zero();
TimeDelta min_playout_delay = TimeDelta::Zero();
TimeDelta render_delay = TimeDelta::Zero();
VCMTiming::GetTimings(&max_decode, &current_delay, &target_delay,
&jitter_buffer, &min_playout_delay, &render_delay);
return jitter_buffer;
}
private:
static constexpr int kDelayMs = 50;
static constexpr int kDecodeTime = kDelayMs / 2;
static constexpr TimeDelta kDelay = TimeDelta::Millis(50);
const TimeDelta kDecodeTime = kDelay / 2;
mutable uint32_t last_timestamp_ = 0;
mutable int64_t last_ms_ = -1;
mutable Timestamp last_render_time_ = Timestamp::MinusInfinity();
};
class FrameObjectFake : public EncodedFrame {
@ -120,12 +121,12 @@ class VCMReceiveStatisticsCallbackMock : public VCMReceiveStatisticsCallback {
MOCK_METHOD(void, OnDroppedFrames, (uint32_t frames_dropped), (override));
MOCK_METHOD(void,
OnFrameBufferTimingsUpdated,
(int max_decode_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms),
(int max_decode,
int current_delay,
int target_delay,
int jitter_buffer,
int min_playout_delay,
int render_delay),
(override));
MOCK_METHOD(void,
OnTimingFrameInfoUpdated,
@ -474,7 +475,7 @@ TEST_F(TestFrameBuffer2, ProtectionModeNackFEC) {
ExtractFrame();
ExtractFrame();
ASSERT_EQ(4u, frames_.size());
EXPECT_LT(timing_.GetCurrentJitter(), kRttMs);
EXPECT_LT(timing_.GetCurrentJitter().ms(), kRttMs);
}
TEST_F(TestFrameBuffer2, NoContinuousFrame) {

@ -16,29 +16,29 @@
namespace webrtc {
bool FrameHasBadRenderTiming(int64_t render_time_ms,
int64_t now_ms,
int target_video_delay) {
bool FrameHasBadRenderTiming(Timestamp render_time,
Timestamp now,
TimeDelta target_video_delay) {
// Zero render time means render immediately.
if (render_time_ms == 0) {
if (render_time.IsZero()) {
return false;
}
if (render_time_ms < 0) {
if (render_time < Timestamp::Zero()) {
return true;
}
const int64_t kMaxVideoDelayMs = 10000;
if (std::abs(render_time_ms - now_ms) > kMaxVideoDelayMs) {
int frame_delay = static_cast<int>(std::abs(render_time_ms - now_ms));
constexpr TimeDelta kMaxVideoDelay = TimeDelta::Millis(10000);
TimeDelta frame_delay = (render_time - now).Abs();
if (frame_delay > kMaxVideoDelay) {
RTC_LOG(LS_WARNING)
<< "A frame about to be decoded is out of the configured "
"delay bounds ("
<< frame_delay << " > " << kMaxVideoDelayMs
<< frame_delay.ms() << " > " << kMaxVideoDelay.ms()
<< "). Resetting the video jitter buffer.";
return true;
}
if (target_video_delay > kMaxVideoDelayMs) {
if (target_video_delay > kMaxVideoDelay) {
RTC_LOG(LS_WARNING) << "The video target delay has grown larger than "
<< kMaxVideoDelayMs << " ms.";
<< kMaxVideoDelay.ms() << " ms.";
return true;
}
return false;

@ -18,10 +18,9 @@
namespace webrtc {
// TODO(https://bugs.webrtc.org/13589): Switch to using Timestamp and TimeDelta.
bool FrameHasBadRenderTiming(int64_t render_time_ms,
int64_t now_ms,
int target_video_delay);
bool FrameHasBadRenderTiming(Timestamp render_time,
Timestamp now,
TimeDelta target_video_delay);
std::unique_ptr<EncodedFrame> CombineAndDeleteFrames(
absl::InlinedVector<std::unique_ptr<EncodedFrame>, 4> frames);

@ -142,7 +142,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
const TimeDelta decode_time = decode_time_ms
? TimeDelta::Millis(*decode_time_ms)
: now - *frameInfo->decodeStart;
_timing->StopDecodeTimer(decode_time.ms(), now.ms());
_timing->StopDecodeTimer(decode_time, now);
decodedImage.set_processing_time(
{*frameInfo->decodeStart, *frameInfo->decodeStart + decode_time});

@ -69,7 +69,7 @@ int32_t VCMReceiver::InsertPacket(const VCMPacket& packet) {
// We don't want to include timestamps which have suffered from
// retransmission here, since we compensate with extra retransmission
// delay within the jitter estimate.
timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds());
timing_->IncomingTimestamp(packet.timestamp, clock_->CurrentTime());
}
return VCM_OK;
}
@ -94,16 +94,18 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms,
}
if (min_playout_delay_ms >= 0)
timing_->set_min_playout_delay(min_playout_delay_ms);
timing_->set_min_playout_delay(TimeDelta::Millis(min_playout_delay_ms));
if (max_playout_delay_ms >= 0)
timing_->set_max_playout_delay(max_playout_delay_ms);
timing_->set_max_playout_delay(TimeDelta::Millis(max_playout_delay_ms));
// We have a frame - Set timing and render timestamp.
timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
const int64_t now_ms = clock_->TimeInMilliseconds();
timing_->SetJitterDelay(
TimeDelta::Millis(jitter_buffer_.EstimatedJitterMs()));
const Timestamp now = clock_->CurrentTime();
const int64_t now_ms = now.ms();
timing_->UpdateCurrentDelay(frame_timestamp);
render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms);
render_time_ms = timing_->RenderTime(frame_timestamp, now).ms();
// Check render timing.
bool timing_error = false;
// Assume that render timing errors are due to changes in the video stream.
@ -117,7 +119,7 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms,
<< frame_delay << " > " << max_video_delay_ms_
<< "). Resetting the video jitter buffer.";
timing_error = true;
} else if (static_cast<int>(timing_->TargetVideoDelay()) >
} else if (static_cast<int>(timing_->TargetVideoDelay().ms()) >
max_video_delay_ms_) {
RTC_LOG(LS_WARNING) << "The video target delay has grown larger than "
<< max_video_delay_ms_
@ -140,8 +142,11 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms,
uint16_t new_max_wait_time =
static_cast<uint16_t>(VCM_MAX(available_wait_time, 0));
uint32_t wait_time_ms = rtc::saturated_cast<uint32_t>(
timing_->MaxWaitingTime(render_time_ms, clock_->TimeInMilliseconds(),
/*too_many_frames_queued=*/false));
timing_
->MaxWaitingTime(Timestamp::Millis(render_time_ms),
clock_->CurrentTime(),
/*too_many_frames_queued=*/false)
.ms());
if (new_max_wait_time < wait_time_ms) {
// We're not allowed to wait until the frame is supposed to be rendered,
// waiting as long as we're allowed to avoid busy looping, and then return

@ -124,7 +124,7 @@ TEST_F(TestVCMReceiver, NonDecodableDuration_OneIncomplete) {
const int kMinDelayMs = 500;
receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
kMaxNonDecodableDuration);
timing_.set_min_playout_delay(kMinDelayMs);
timing_.set_min_playout_delay(TimeDelta::Millis(kMinDelayMs));
int64_t key_frame_inserted = clock_.TimeInMilliseconds();
EXPECT_GE(InsertFrame(VideoFrameType::kVideoFrameKey, true), kNoError);
// Insert an incomplete frame.
@ -152,7 +152,7 @@ TEST_F(TestVCMReceiver, NonDecodableDuration_NoTrigger) {
const int kMinDelayMs = 500;
receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
kMaxNonDecodableDuration);
timing_.set_min_playout_delay(kMinDelayMs);
timing_.set_min_playout_delay(TimeDelta::Millis(kMinDelayMs));
int64_t key_frame_inserted = clock_.TimeInMilliseconds();
EXPECT_GE(InsertFrame(VideoFrameType::kVideoFrameKey, true), kNoError);
// Insert an incomplete frame.
@ -182,7 +182,7 @@ TEST_F(TestVCMReceiver, NonDecodableDuration_NoTrigger2) {
const int kMinDelayMs = 500;
receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
kMaxNonDecodableDuration);
timing_.set_min_playout_delay(kMinDelayMs);
timing_.set_min_playout_delay(TimeDelta::Millis(kMinDelayMs));
int64_t key_frame_inserted = clock_.TimeInMilliseconds();
EXPECT_GE(InsertFrame(VideoFrameType::kVideoFrameKey, true), kNoError);
// Insert enough frames to have too long non-decodable sequence, except that
@ -212,7 +212,7 @@ TEST_F(TestVCMReceiver, NonDecodableDuration_KeyFrameAfterIncompleteFrames) {
const int kMinDelayMs = 500;
receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
kMaxNonDecodableDuration);
timing_.set_min_playout_delay(kMinDelayMs);
timing_.set_min_playout_delay(TimeDelta::Millis(kMinDelayMs));
int64_t key_frame_inserted = clock_.TimeInMilliseconds();
EXPECT_GE(InsertFrame(VideoFrameType::kVideoFrameKey, true), kNoError);
// Insert an incomplete frame.
@ -448,9 +448,9 @@ TEST_F(VCMReceiverTimingTest, FrameForDecodingPreferLateDecoding) {
int64_t arrive_timestamps[kNumFrames];
int64_t render_timestamps[kNumFrames];
int render_delay_ms;
int max_decode_ms;
int dummy;
TimeDelta render_delay_ms = TimeDelta::Zero();
TimeDelta max_decode_ms = TimeDelta::Zero();
TimeDelta dummy = TimeDelta::Zero();
timing_.GetTimings(&max_decode_ms, &dummy, &dummy, &dummy, &dummy,
&render_delay_ms);
@ -479,8 +479,9 @@ TEST_F(VCMReceiverTimingTest, FrameForDecodingPreferLateDecoding) {
receiver_.FrameForDecoding(kMaxWaitTime, prefer_late_decoding);
int64_t end_time = clock_.TimeInMilliseconds();
if (frame) {
EXPECT_EQ(frame->RenderTimeMs() - max_decode_ms - render_delay_ms,
end_time);
EXPECT_EQ(
frame->RenderTimeMs() - max_decode_ms.ms() - render_delay_ms.ms(),
end_time);
receiver_.ReleaseFrame(frame);
++num_frames_return;
} else {

@ -12,6 +12,7 @@
#include <algorithm>
#include "api/units/time_delta.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/time/timestamp_extrapolator.h"
#include "system_wrappers/include/clock.h"
@ -19,27 +20,27 @@
namespace webrtc {
namespace {
// Default pacing that is used for the low-latency renderer path.
constexpr TimeDelta kZeroPlayoutDelayDefaultMinPacing = TimeDelta::Millis(8);
} // namespace
VCMTiming::VCMTiming(Clock* clock)
: clock_(clock),
ts_extrapolator_(std::make_unique<TimestampExtrapolator>(
clock_->TimeInMilliseconds())),
ts_extrapolator_(
std::make_unique<TimestampExtrapolator>(clock_->CurrentTime())),
codec_timer_(std::make_unique<VCMCodecTimer>()),
render_delay_ms_(kDefaultRenderDelayMs),
min_playout_delay_ms_(0),
max_playout_delay_ms_(10000),
jitter_delay_ms_(0),
current_delay_ms_(0),
render_delay_(kDefaultRenderDelay),
min_playout_delay_(TimeDelta::Zero()),
max_playout_delay_(TimeDelta::Seconds(10)),
jitter_delay_(TimeDelta::Zero()),
current_delay_(TimeDelta::Zero()),
prev_frame_timestamp_(0),
timing_frame_info_(),
num_decoded_frames_(0),
low_latency_renderer_enabled_("enabled", true),
zero_playout_delay_min_pacing_("min_pacing",
kZeroPlayoutDelayDefaultMinPacing),
last_decode_scheduled_ts_(0) {
last_decode_scheduled_(Timestamp::Zero()) {
ParseFieldTrial({&low_latency_renderer_enabled_},
field_trial::FindFullName("WebRTC-LowLatencyRenderer"));
ParseFieldTrial({&zero_playout_delay_min_pacing_},
@ -48,218 +49,208 @@ VCMTiming::VCMTiming(Clock* clock)
void VCMTiming::Reset() {
MutexLock lock(&mutex_);
ts_extrapolator_->Reset(clock_->TimeInMilliseconds());
ts_extrapolator_->Reset(clock_->CurrentTime());
codec_timer_ = std::make_unique<VCMCodecTimer>();
render_delay_ms_ = kDefaultRenderDelayMs;
min_playout_delay_ms_ = 0;
jitter_delay_ms_ = 0;
current_delay_ms_ = 0;
render_delay_ = kDefaultRenderDelay;
min_playout_delay_ = TimeDelta::Zero();
jitter_delay_ = TimeDelta::Zero();
current_delay_ = TimeDelta::Zero();
prev_frame_timestamp_ = 0;
}
void VCMTiming::set_render_delay(int render_delay_ms) {
void VCMTiming::set_render_delay(TimeDelta render_delay) {
MutexLock lock(&mutex_);
render_delay_ms_ = render_delay_ms;
render_delay_ = render_delay;
}
void VCMTiming::set_min_playout_delay(int min_playout_delay_ms) {
void VCMTiming::set_min_playout_delay(TimeDelta min_playout_delay) {
MutexLock lock(&mutex_);
min_playout_delay_ms_ = min_playout_delay_ms;
min_playout_delay_ = min_playout_delay;
}
int VCMTiming::min_playout_delay() {
TimeDelta VCMTiming::min_playout_delay() {
MutexLock lock(&mutex_);
return min_playout_delay_ms_;
return min_playout_delay_;
}
void VCMTiming::set_max_playout_delay(int max_playout_delay_ms) {
void VCMTiming::set_max_playout_delay(TimeDelta max_playout_delay) {
MutexLock lock(&mutex_);
max_playout_delay_ms_ = max_playout_delay_ms;
max_playout_delay_ = max_playout_delay;
}
int VCMTiming::max_playout_delay() {
TimeDelta VCMTiming::max_playout_delay() {
MutexLock lock(&mutex_);
return max_playout_delay_ms_;
return max_playout_delay_;
}
void VCMTiming::SetJitterDelay(int jitter_delay_ms) {
void VCMTiming::SetJitterDelay(TimeDelta jitter_delay) {
MutexLock lock(&mutex_);
if (jitter_delay_ms != jitter_delay_ms_) {
jitter_delay_ms_ = jitter_delay_ms;
if (jitter_delay != jitter_delay_) {
jitter_delay_ = jitter_delay;
// When in initial state, set current delay to minimum delay.
if (current_delay_ms_ == 0) {
current_delay_ms_ = jitter_delay_ms_;
if (current_delay_.IsZero()) {
current_delay_ = jitter_delay_;
}
}
}
void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) {
MutexLock lock(&mutex_);
int target_delay_ms = TargetDelayInternal();
TimeDelta target_delay = TargetDelayInternal();
if (current_delay_ms_ == 0) {
if (current_delay_.IsZero()) {
// Not initialized, set current delay to target.
current_delay_ms_ = target_delay_ms;
} else if (target_delay_ms != current_delay_ms_) {
int64_t delay_diff_ms =
static_cast<int64_t>(target_delay_ms) - current_delay_ms_;
current_delay_ = target_delay;
} else if (target_delay != current_delay_) {
TimeDelta delay_diff = target_delay - current_delay_;
// Never change the delay with more than 100 ms every second. If we're
// changing the delay in too large steps we will get noticeable freezes. By
// limiting the change we can increase the delay in smaller steps, which
// will be experienced as the video is played in slow motion. When lowering
// the delay the video will be played at a faster pace.
int64_t max_change_ms = 0;
TimeDelta max_change = TimeDelta::Zero();
if (frame_timestamp < 0x0000ffff && prev_frame_timestamp_ > 0xffff0000) {
// wrap
max_change_ms = kDelayMaxChangeMsPerS *
(frame_timestamp + (static_cast<int64_t>(1) << 32) -
prev_frame_timestamp_) /
90000;
max_change =
TimeDelta::Millis(kDelayMaxChangeMsPerS *
(frame_timestamp + (static_cast<int64_t>(1) << 32) -
prev_frame_timestamp_) /
90000);
} else {
max_change_ms = kDelayMaxChangeMsPerS *
(frame_timestamp - prev_frame_timestamp_) / 90000;
max_change =
TimeDelta::Millis(kDelayMaxChangeMsPerS *
(frame_timestamp - prev_frame_timestamp_) / 90000);
}
if (max_change_ms <= 0) {
if (max_change <= TimeDelta::Zero()) {
// Any changes less than 1 ms are truncated and will be postponed.
// Negative change will be due to reordering and should be ignored.
return;
}
delay_diff_ms = std::max(delay_diff_ms, -max_change_ms);
delay_diff_ms = std::min(delay_diff_ms, max_change_ms);
delay_diff = std::max(delay_diff, -max_change);
delay_diff = std::min(delay_diff, max_change);
current_delay_ms_ = current_delay_ms_ + delay_diff_ms;
current_delay_ = current_delay_ + delay_diff;
}
prev_frame_timestamp_ = frame_timestamp;
}
void VCMTiming::UpdateCurrentDelay(int64_t render_time_ms,
int64_t actual_decode_time_ms) {
void VCMTiming::UpdateCurrentDelay(Timestamp render_time,
Timestamp actual_decode_time) {
MutexLock lock(&mutex_);
uint32_t target_delay_ms = TargetDelayInternal();
int64_t delayed_ms =
actual_decode_time_ms -
(render_time_ms - RequiredDecodeTimeMs() - render_delay_ms_);
if (delayed_ms < 0) {
TimeDelta target_delay = TargetDelayInternal();
TimeDelta delayed =
(actual_decode_time - render_time) + RequiredDecodeTime() + render_delay_;
if (delayed < TimeDelta::Zero()) {
return;
}
if (current_delay_ms_ + delayed_ms <= target_delay_ms) {
current_delay_ms_ += delayed_ms;
if (current_delay_ + delayed <= target_delay) {
current_delay_ += delayed;
} else {
current_delay_ms_ = target_delay_ms;
current_delay_ = target_delay;
}
}
void VCMTiming::StopDecodeTimer(uint32_t /*time_stamp*/,
int32_t decode_time_ms,
int64_t now_ms,
int64_t /*render_time_ms*/) {
StopDecodeTimer(decode_time_ms, now_ms);
}
void VCMTiming::StopDecodeTimer(int32_t decode_time_ms, int64_t now_ms) {
void VCMTiming::StopDecodeTimer(TimeDelta decode_time, Timestamp now) {
MutexLock lock(&mutex_);
codec_timer_->AddTiming(decode_time_ms, now_ms);
RTC_DCHECK_GE(decode_time_ms, 0);
codec_timer_->AddTiming(decode_time.ms(), now.ms());
RTC_DCHECK_GE(decode_time, TimeDelta::Zero());
++num_decoded_frames_;
}
void VCMTiming::IncomingTimestamp(uint32_t time_stamp, int64_t now_ms) {
void VCMTiming::IncomingTimestamp(uint32_t rtp_timestamp, Timestamp now) {
MutexLock lock(&mutex_);
ts_extrapolator_->Update(now_ms, time_stamp);
ts_extrapolator_->Update(now, rtp_timestamp);
}
int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp,
int64_t now_ms) const {
Timestamp VCMTiming::RenderTime(uint32_t frame_timestamp, Timestamp now) const {
MutexLock lock(&mutex_);
return RenderTimeMsInternal(frame_timestamp, now_ms);
return RenderTimeInternal(frame_timestamp, now);
}
void VCMTiming::SetLastDecodeScheduledTimestamp(
int64_t last_decode_scheduled_ts) {
Timestamp last_decode_scheduled) {
MutexLock lock(&mutex_);
last_decode_scheduled_ts_ = last_decode_scheduled_ts;
last_decode_scheduled_ = last_decode_scheduled;
}
int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
int64_t now_ms) const {
constexpr int kLowLatencyRendererMaxPlayoutDelayMs = 500;
if (min_playout_delay_ms_ == 0 &&
(max_playout_delay_ms_ == 0 ||
Timestamp VCMTiming::RenderTimeInternal(uint32_t frame_timestamp,
Timestamp now) const {
constexpr TimeDelta kLowLatencyRendererMaxPlayoutDelay =
TimeDelta::Millis(500);
if (min_playout_delay_.IsZero() &&
(max_playout_delay_.IsZero() ||
(low_latency_renderer_enabled_ &&
max_playout_delay_ms_ <= kLowLatencyRendererMaxPlayoutDelayMs))) {
max_playout_delay_ <= kLowLatencyRendererMaxPlayoutDelay))) {
// Render as soon as possible or with low-latency renderer algorithm.
return 0;
return Timestamp::Zero();
}
// Note that TimestampExtrapolator::ExtrapolateLocalTime is not a const
// method; it mutates the object's wraparound state.
int64_t estimated_complete_time_ms =
ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp);
if (estimated_complete_time_ms == -1) {
estimated_complete_time_ms = now_ms;
}
Timestamp estimated_complete_time =
ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp).value_or(now);
// Make sure the actual delay stays in the range of `min_playout_delay_ms_`
// and `max_playout_delay_ms_`.
int actual_delay = std::max(current_delay_ms_, min_playout_delay_ms_);
actual_delay = std::min(actual_delay, max_playout_delay_ms_);
return estimated_complete_time_ms + actual_delay;
// Make sure the actual delay stays in the range of `min_playout_delay_`
// and `max_playout_delay_`.
TimeDelta actual_delay =
current_delay_.Clamped(min_playout_delay_, max_playout_delay_);
return estimated_complete_time + actual_delay;
}
int VCMTiming::RequiredDecodeTimeMs() const {
TimeDelta VCMTiming::RequiredDecodeTime() const {
const int decode_time_ms = codec_timer_->RequiredDecodeTimeMs();
RTC_DCHECK_GE(decode_time_ms, 0);
return decode_time_ms;
return TimeDelta::Millis(decode_time_ms);
}
int64_t VCMTiming::MaxWaitingTime(int64_t render_time_ms,
int64_t now_ms,
bool too_many_frames_queued) const {
TimeDelta VCMTiming::MaxWaitingTime(Timestamp render_time,
Timestamp now,
bool too_many_frames_queued) const {
MutexLock lock(&mutex_);
if (render_time_ms == 0 && zero_playout_delay_min_pacing_->us() > 0 &&
min_playout_delay_ms_ == 0 && max_playout_delay_ms_ > 0) {
// `render_time_ms` == 0 indicates that the frame should be decoded and
if (render_time.IsZero() && zero_playout_delay_min_pacing_->us() > 0 &&
min_playout_delay_.IsZero() && max_playout_delay_ > TimeDelta::Zero()) {
// `render_time` == 0 indicates that the frame should be decoded and
// rendered as soon as possible. However, the decoder can be choked if too
// many frames are sent at once. Therefore, limit the interframe delay to
// |zero_playout_delay_min_pacing_| unless too many frames are queued in
// which case the frames are sent to the decoder at once.
if (too_many_frames_queued) {
return 0;
return TimeDelta::Zero();
}
int64_t earliest_next_decode_start_time =
last_decode_scheduled_ts_ + zero_playout_delay_min_pacing_->ms();
int64_t max_wait_time_ms = now_ms >= earliest_next_decode_start_time
? 0
: earliest_next_decode_start_time - now_ms;
return max_wait_time_ms;
Timestamp earliest_next_decode_start_time =
last_decode_scheduled_ + zero_playout_delay_min_pacing_;
TimeDelta max_wait_time = now >= earliest_next_decode_start_time
? TimeDelta::Zero()
: earliest_next_decode_start_time - now;
return max_wait_time;
}
return render_time_ms - now_ms - RequiredDecodeTimeMs() - render_delay_ms_;
return render_time - now - RequiredDecodeTime() - render_delay_;
}
int VCMTiming::TargetVideoDelay() const {
TimeDelta VCMTiming::TargetVideoDelay() const {
MutexLock lock(&mutex_);
return TargetDelayInternal();
}
int VCMTiming::TargetDelayInternal() const {
return std::max(min_playout_delay_ms_,
jitter_delay_ms_ + RequiredDecodeTimeMs() + render_delay_ms_);
TimeDelta VCMTiming::TargetDelayInternal() const {
return std::max(min_playout_delay_,
jitter_delay_ + RequiredDecodeTime() + render_delay_);
}
bool VCMTiming::GetTimings(int* max_decode_ms,
int* current_delay_ms,
int* target_delay_ms,
int* jitter_buffer_ms,
int* min_playout_delay_ms,
int* render_delay_ms) const {
bool VCMTiming::GetTimings(TimeDelta* max_decode,
TimeDelta* current_delay,
TimeDelta* target_delay,
TimeDelta* jitter_buffer,
TimeDelta* min_playout_delay,
TimeDelta* render_delay) const {
MutexLock lock(&mutex_);
*max_decode_ms = RequiredDecodeTimeMs();
*current_delay_ms = current_delay_ms_;
*target_delay_ms = TargetDelayInternal();
*jitter_buffer_ms = jitter_delay_ms_;
*min_playout_delay_ms = min_playout_delay_ms_;
*render_delay_ms = render_delay_ms_;
*max_decode = RequiredDecodeTime();
*current_delay = current_delay_;
*target_delay = TargetDelayInternal();
*jitter_buffer = jitter_delay_;
*min_playout_delay = min_playout_delay_;
*render_delay = render_delay_;
return (num_decoded_frames_ > 0);
}

@ -29,6 +29,9 @@ class TimestampExtrapolator;
class VCMTiming {
public:
static constexpr auto kDefaultRenderDelay = TimeDelta::Millis(10);
static constexpr auto kDelayMaxChangeMsPerS = 100;
explicit VCMTiming(Clock* clock);
virtual ~VCMTiming() = default;
@ -36,19 +39,19 @@ class VCMTiming {
void Reset();
// Set the amount of time needed to render an image. Defaults to 10 ms.
void set_render_delay(int render_delay_ms);
void set_render_delay(TimeDelta render_delay);
// Set the minimum time the video must be delayed on the receiver to
// get the desired jitter buffer level.
void SetJitterDelay(int required_delay_ms);
void SetJitterDelay(TimeDelta required_delay);
// Set/get the minimum playout delay from capture to render in ms.
void set_min_playout_delay(int min_playout_delay_ms);
int min_playout_delay();
// Set/get the minimum playout delay from capture to render.
void set_min_playout_delay(TimeDelta min_playout_delay);
TimeDelta min_playout_delay();
// Set/get the maximum playout delay from capture to render in ms.
void set_max_playout_delay(int max_playout_delay_ms);
int max_playout_delay();
void set_max_playout_delay(TimeDelta max_playout_delay);
TimeDelta max_playout_delay();
// Increases or decreases the current delay to get closer to the target delay.
// Calculates how long it has been since the previous call to this function,
@ -59,51 +62,44 @@ class VCMTiming {
// Given the actual decode time in ms and the render time in ms for a frame,
// this function calculates how late the frame is and increases the delay
// accordingly.
void UpdateCurrentDelay(int64_t render_time_ms,
int64_t actual_decode_time_ms);
void UpdateCurrentDelay(Timestamp render_time, Timestamp actual_decode_time);
// Stops the decoder timer, should be called when the decoder returns a frame
// or when the decoded frame callback is called.
void StopDecodeTimer(int32_t decode_time_ms, int64_t now_ms);
// TODO(kron): Remove once downstream projects has been changed to use the
// above function.
void StopDecodeTimer(uint32_t time_stamp,
int32_t decode_time_ms,
int64_t now_ms,
int64_t render_time_ms);
void StopDecodeTimer(TimeDelta decode_time, Timestamp now);
// Used to report that a frame is passed to decoding. Updates the timestamp
// filter which is used to map between timestamps and receiver system time.
void IncomingTimestamp(uint32_t time_stamp, int64_t last_packet_time_ms);
void IncomingTimestamp(uint32_t rtp_timestamp, Timestamp last_packet_time);
// Returns the receiver system time when the frame with timestamp
// `frame_timestamp` should be rendered, assuming that the system time
// currently is `now_ms`.
virtual int64_t RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const;
// currently is `now`.
virtual Timestamp RenderTime(uint32_t frame_timestamp, Timestamp now) const;
// Returns the maximum time in ms that we can wait for a frame to become
// complete before we must pass it to the decoder. render_time_ms==0 indicates
// complete before we must pass it to the decoder. render_time==0 indicates
// that the frames should be processed as quickly as possible, with possibly
// only a small delay added to make sure that the decoder is not overloaded.
// In this case, the parameter too_many_frames_queued is used to signal that
// the decode queue is full and that the frame should be decoded as soon as
// possible.
virtual int64_t MaxWaitingTime(int64_t render_time_ms,
int64_t now_ms,
bool too_many_frames_queued) const;
virtual TimeDelta MaxWaitingTime(Timestamp render_time,
Timestamp now,
bool too_many_frames_queued) const;
// Returns the current target delay which is required delay + decode time +
// render delay.
int TargetVideoDelay() const;
TimeDelta TargetVideoDelay() const;
// Return current timing information. Returns true if the first frame has been
// decoded, false otherwise.
virtual bool GetTimings(int* max_decode_ms,
int* current_delay_ms,
int* target_delay_ms,
int* jitter_buffer_ms,
int* min_playout_delay_ms,
int* render_delay_ms) const;
virtual bool GetTimings(TimeDelta* max_decode,
TimeDelta* current_delay,
TimeDelta* target_delay,
TimeDelta* jitter_buffer,
TimeDelta* min_playout_delay,
TimeDelta* render_delay) const;
void SetTimingFrameInfo(const TimingFrameInfo& info);
absl::optional<TimingFrameInfo> GetTimingFrameInfo();
@ -113,16 +109,13 @@ class VCMTiming {
absl::optional<int> MaxCompositionDelayInFrames() const;
// Updates the last time a frame was scheduled for decoding.
void SetLastDecodeScheduledTimestamp(int64_t last_decode_scheduled_ts);
enum { kDefaultRenderDelayMs = 10 };
enum { kDelayMaxChangeMsPerS = 100 };
void SetLastDecodeScheduledTimestamp(Timestamp last_decode_scheduled);
protected:
int RequiredDecodeTimeMs() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
int64_t RenderTimeMsInternal(uint32_t frame_timestamp, int64_t now_ms) const
TimeDelta RequiredDecodeTime() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
Timestamp RenderTimeInternal(uint32_t frame_timestamp, Timestamp now) const
RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
int TargetDelayInternal() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
TimeDelta TargetDelayInternal() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
private:
mutable Mutex mutex_;
@ -131,16 +124,16 @@ class VCMTiming {
RTC_PT_GUARDED_BY(mutex_);
std::unique_ptr<VCMCodecTimer> codec_timer_ RTC_GUARDED_BY(mutex_)
RTC_PT_GUARDED_BY(mutex_);
int render_delay_ms_ RTC_GUARDED_BY(mutex_);
TimeDelta render_delay_ RTC_GUARDED_BY(mutex_);
// Best-effort playout delay range for frames from capture to render.
// The receiver tries to keep the delay between `min_playout_delay_ms_`
// and `max_playout_delay_ms_` taking the network jitter into account.
// A special case is where min_playout_delay_ms_ = max_playout_delay_ms_ = 0,
// in which case the receiver tries to play the frames as they arrive.
int min_playout_delay_ms_ RTC_GUARDED_BY(mutex_);
int max_playout_delay_ms_ RTC_GUARDED_BY(mutex_);
int jitter_delay_ms_ RTC_GUARDED_BY(mutex_);
int current_delay_ms_ RTC_GUARDED_BY(mutex_);
TimeDelta min_playout_delay_ RTC_GUARDED_BY(mutex_);
TimeDelta max_playout_delay_ RTC_GUARDED_BY(mutex_);
TimeDelta jitter_delay_ RTC_GUARDED_BY(mutex_);
TimeDelta current_delay_ RTC_GUARDED_BY(mutex_);
uint32_t prev_frame_timestamp_ RTC_GUARDED_BY(mutex_);
absl::optional<TimingFrameInfo> timing_frame_info_ RTC_GUARDED_BY(mutex_);
size_t num_decoded_frames_ RTC_GUARDED_BY(mutex_);
@ -158,7 +151,7 @@ class VCMTiming {
// Timestamp at which the last frame was scheduled to be sent to the decoder.
// Used only when the RTP header extension playout delay is set to min=0 ms
// which is indicated by a render time set to 0.
int64_t last_decode_scheduled_ts_ RTC_GUARDED_BY(mutex_);
Timestamp last_decode_scheduled_ RTC_GUARDED_BY(mutex_);
};
} // namespace webrtc
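
A minimal usage sketch of the converted VCMTiming interface above, based only on the signatures visible in this diff; the free function and its arguments are hypothetical glue, not part of the commit:

#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "modules/video_coding/timing.h"
#include "system_wrappers/include/clock.h"

// Feed a received RTP timestamp into the timing module and query the
// render time and maximum waiting time using the new unit types.
webrtc::TimeDelta SketchTimingUsage(webrtc::Clock* clock,
                                    uint32_t rtp_timestamp) {
  webrtc::VCMTiming timing(clock);
  timing.IncomingTimestamp(rtp_timestamp, clock->CurrentTime());
  timing.SetJitterDelay(webrtc::TimeDelta::Millis(20));
  timing.UpdateCurrentDelay(rtp_timestamp);

  webrtc::Timestamp now = clock->CurrentTime();
  webrtc::Timestamp render_time = timing.RenderTime(rtp_timestamp, now);
  return timing.MaxWaitingTime(render_time, now,
                               /*too_many_frames_queued=*/false);
}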

@ -10,13 +10,18 @@
#include "modules/video_coding/timing.h"
#include "api/units/frequency.h"
#include "api/units/time_delta.h"
#include "system_wrappers/include/clock.h"
#include "test/field_trial.h"
#include "test/gtest.h"
namespace webrtc {
namespace {
const int kFps = 25;
constexpr Frequency k25Fps = Frequency::Hertz(25);
constexpr Frequency k90kHz = Frequency::KiloHertz(90);
} // namespace
TEST(ReceiverTimingTest, JitterDelay) {
@ -29,102 +34,105 @@ TEST(ReceiverTimingTest, JitterDelay) {
timing.Reset();
timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
uint32_t jitter_delay_ms = 20;
timing.SetJitterDelay(jitter_delay_ms);
timing.IncomingTimestamp(timestamp, clock.CurrentTime());
TimeDelta jitter_delay = TimeDelta::Millis(20);
timing.SetJitterDelay(jitter_delay);
timing.UpdateCurrentDelay(timestamp);
timing.set_render_delay(0);
uint32_t wait_time_ms = timing.MaxWaitingTime(
timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
clock.TimeInMilliseconds(), /*too_many_frames_queued=*/false);
timing.set_render_delay(TimeDelta::Zero());
auto wait_time = timing.MaxWaitingTime(
timing.RenderTime(timestamp, clock.CurrentTime()), clock.CurrentTime(),
/*too_many_frames_queued=*/false);
// First update initializes the render time. Since we have no decode delay
// we get wait_time_ms = renderTime - now - renderDelay = jitter.
EXPECT_EQ(jitter_delay_ms, wait_time_ms);
// we get wait_time = renderTime - now - renderDelay = jitter.
EXPECT_EQ(jitter_delay, wait_time);
jitter_delay_ms += VCMTiming::kDelayMaxChangeMsPerS + 10;
jitter_delay += TimeDelta::Millis(VCMTiming::kDelayMaxChangeMsPerS + 10);
timestamp += 90000;
clock.AdvanceTimeMilliseconds(1000);
timing.SetJitterDelay(jitter_delay_ms);
timing.SetJitterDelay(jitter_delay);
timing.UpdateCurrentDelay(timestamp);
wait_time_ms = timing.MaxWaitingTime(
timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
clock.TimeInMilliseconds(), /*too_many_frames_queued=*/false);
wait_time = timing.MaxWaitingTime(
timing.RenderTime(timestamp, clock.CurrentTime()), clock.CurrentTime(),
/*too_many_frames_queued=*/false);
// Since we gradually increase the delay we only get 100 ms every second.
EXPECT_EQ(jitter_delay_ms - 10, wait_time_ms);
EXPECT_EQ(jitter_delay - TimeDelta::Millis(10), wait_time);
timestamp += 90000;
clock.AdvanceTimeMilliseconds(1000);
timing.UpdateCurrentDelay(timestamp);
wait_time_ms = timing.MaxWaitingTime(
timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
clock.TimeInMilliseconds(), /*too_many_frames_queued=*/false);
EXPECT_EQ(jitter_delay_ms, wait_time_ms);
wait_time = timing.MaxWaitingTime(
timing.RenderTime(timestamp, clock.CurrentTime()), clock.CurrentTime(),
/*too_many_frames_queued=*/false);
EXPECT_EQ(jitter_delay, wait_time);
// Insert frames without jitter, verify that this gives the exact wait time.
const int kNumFrames = 300;
for (int i = 0; i < kNumFrames; i++) {
clock.AdvanceTimeMilliseconds(1000 / kFps);
timestamp += 90000 / kFps;
timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
clock.AdvanceTime(1 / k25Fps);
timestamp += k90kHz / k25Fps;
timing.IncomingTimestamp(timestamp, clock.CurrentTime());
}
timing.UpdateCurrentDelay(timestamp);
wait_time_ms = timing.MaxWaitingTime(
timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
clock.TimeInMilliseconds(), /*too_many_frames_queued=*/false);
EXPECT_EQ(jitter_delay_ms, wait_time_ms);
wait_time = timing.MaxWaitingTime(
timing.RenderTime(timestamp, clock.CurrentTime()), clock.CurrentTime(),
/*too_many_frames_queued=*/false);
EXPECT_EQ(jitter_delay, wait_time);
// Add decode time estimates for 1 second.
const uint32_t kDecodeTimeMs = 10;
for (int i = 0; i < kFps; i++) {
clock.AdvanceTimeMilliseconds(kDecodeTimeMs);
timing.StopDecodeTimer(kDecodeTimeMs, clock.TimeInMilliseconds());
timestamp += 90000 / kFps;
clock.AdvanceTimeMilliseconds(1000 / kFps - kDecodeTimeMs);
timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
const TimeDelta kDecodeTime = TimeDelta::Millis(10);
for (int i = 0; i < k25Fps.hertz(); i++) {
clock.AdvanceTime(kDecodeTime);
timing.StopDecodeTimer(kDecodeTime, clock.CurrentTime());
timestamp += k90kHz / k25Fps;
clock.AdvanceTime(1 / k25Fps - kDecodeTime);
timing.IncomingTimestamp(timestamp, clock.CurrentTime());
}
timing.UpdateCurrentDelay(timestamp);
wait_time_ms = timing.MaxWaitingTime(
timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
clock.TimeInMilliseconds(), /*too_many_frames_queued=*/false);
EXPECT_EQ(jitter_delay_ms, wait_time_ms);
wait_time = timing.MaxWaitingTime(
timing.RenderTime(timestamp, clock.CurrentTime()), clock.CurrentTime(),
/*too_many_frames_queued=*/false);
EXPECT_EQ(jitter_delay, wait_time);
const int kMinTotalDelayMs = 200;
timing.set_min_playout_delay(kMinTotalDelayMs);
const TimeDelta kMinTotalDelay = TimeDelta::Millis(200);
timing.set_min_playout_delay(kMinTotalDelay);
clock.AdvanceTimeMilliseconds(5000);
timestamp += 5 * 90000;
timing.UpdateCurrentDelay(timestamp);
const int kRenderDelayMs = 10;
timing.set_render_delay(kRenderDelayMs);
wait_time_ms = timing.MaxWaitingTime(
timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
clock.TimeInMilliseconds(), /*too_many_frames_queued=*/false);
const TimeDelta kRenderDelay = TimeDelta::Millis(10);
timing.set_render_delay(kRenderDelay);
wait_time = timing.MaxWaitingTime(
timing.RenderTime(timestamp, clock.CurrentTime()), clock.CurrentTime(),
/*too_many_frames_queued=*/false);
// We should at least have kMinTotalDelayMs - decodeTime (10) - renderTime
// (10) to wait.
EXPECT_EQ(kMinTotalDelayMs - kDecodeTimeMs - kRenderDelayMs, wait_time_ms);
EXPECT_EQ(kMinTotalDelay - kDecodeTime - kRenderDelay, wait_time);
// The total video delay should be equal to the min total delay.
EXPECT_EQ(kMinTotalDelayMs, timing.TargetVideoDelay());
EXPECT_EQ(kMinTotalDelay, timing.TargetVideoDelay());
// Reset playout delay.
timing.set_min_playout_delay(0);
timing.set_min_playout_delay(TimeDelta::Zero());
clock.AdvanceTimeMilliseconds(5000);
timestamp += 5 * 90000;
timing.UpdateCurrentDelay(timestamp);
}
TEST(ReceiverTimingTest, TimestampWrapAround) {
SimulatedClock clock(0);
constexpr auto kStartTime = Timestamp::Millis(1337);
SimulatedClock clock(kStartTime);
VCMTiming timing(&clock);
// Provoke a wrap-around. The fifth frame will have wrapped at 25 fps.
uint32_t timestamp = 0xFFFFFFFFu - 3 * 90000 / kFps;
constexpr uint32_t kRtpTicksPerFrame = k90kHz / k25Fps;
uint32_t timestamp = 0xFFFFFFFFu - 3 * kRtpTicksPerFrame;
for (int i = 0; i < 5; ++i) {
timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
clock.AdvanceTimeMilliseconds(1000 / kFps);
timestamp += 90000 / kFps;
EXPECT_EQ(3 * 1000 / kFps,
timing.RenderTimeMs(0xFFFFFFFFu, clock.TimeInMilliseconds()));
EXPECT_EQ(3 * 1000 / kFps + 1,
timing.RenderTimeMs(89u, // One ms later in 90 kHz.
clock.TimeInMilliseconds()));
timing.IncomingTimestamp(timestamp, clock.CurrentTime());
clock.AdvanceTime(1 / k25Fps);
timestamp += kRtpTicksPerFrame;
EXPECT_EQ(kStartTime + 3 / k25Fps,
timing.RenderTime(0xFFFFFFFFu, clock.CurrentTime()));
// One ms later in 90 kHz.
EXPECT_EQ(kStartTime + 3 / k25Fps + TimeDelta::Millis(1),
timing.RenderTime(89u, clock.CurrentTime()));
}
}
@ -132,85 +140,85 @@ TEST(ReceiverTimingTest, MaxWaitingTimeIsZeroForZeroRenderTime) {
// This is the default path when the RTP playout delay header extension is set
// to min==0 and max==0.
constexpr int64_t kStartTimeUs = 3.15e13; // About one year in us.
constexpr int64_t kTimeDeltaMs = 1000.0 / 60.0;
constexpr int64_t kZeroRenderTimeMs = 0;
constexpr TimeDelta kTimeDelta = 1 / Frequency::Hertz(60);
constexpr Timestamp kZeroRenderTime = Timestamp::Zero();
SimulatedClock clock(kStartTimeUs);
VCMTiming timing(&clock);
timing.Reset();
timing.set_max_playout_delay(0);
timing.set_max_playout_delay(TimeDelta::Zero());
for (int i = 0; i < 10; ++i) {
clock.AdvanceTimeMilliseconds(kTimeDeltaMs);
int64_t now_ms = clock.TimeInMilliseconds();
EXPECT_LT(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
clock.AdvanceTime(kTimeDelta);
Timestamp now = clock.CurrentTime();
EXPECT_LT(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
0);
TimeDelta::Zero());
}
// Another frame submitted at the same time also returns a negative max
// waiting time.
int64_t now_ms = clock.TimeInMilliseconds();
EXPECT_LT(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
Timestamp now = clock.CurrentTime();
EXPECT_LT(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
0);
TimeDelta::Zero());
// MaxWaitingTime should be less than zero even if there's a burst of frames.
EXPECT_LT(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
EXPECT_LT(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
0);
EXPECT_LT(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
TimeDelta::Zero());
EXPECT_LT(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
0);
EXPECT_LT(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
TimeDelta::Zero());
EXPECT_LT(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
0);
TimeDelta::Zero());
}
TEST(ReceiverTimingTest, MaxWaitingTimeZeroDelayPacingExperiment) {
// The minimum pacing is enabled by a field trial and active if the RTP
// playout delay header extension is set to min==0.
constexpr int64_t kMinPacingMs = 3;
constexpr TimeDelta kMinPacing = TimeDelta::Millis(3);
test::ScopedFieldTrials override_field_trials(
"WebRTC-ZeroPlayoutDelay/min_pacing:3ms/");
constexpr int64_t kStartTimeUs = 3.15e13; // About one year in us.
constexpr int64_t kTimeDeltaMs = 1000.0 / 60.0;
constexpr int64_t kZeroRenderTimeMs = 0;
constexpr TimeDelta kTimeDelta = 1 / Frequency::Hertz(60);
constexpr auto kZeroRenderTime = Timestamp::Zero();
SimulatedClock clock(kStartTimeUs);
VCMTiming timing(&clock);
timing.Reset();
// MaxWaitingTime() returns zero for evenly spaced video frames.
for (int i = 0; i < 10; ++i) {
clock.AdvanceTimeMilliseconds(kTimeDeltaMs);
int64_t now_ms = clock.TimeInMilliseconds();
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
clock.AdvanceTime(kTimeDelta);
Timestamp now = clock.CurrentTime();
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
0);
timing.SetLastDecodeScheduledTimestamp(now_ms);
TimeDelta::Zero());
timing.SetLastDecodeScheduledTimestamp(now);
}
// Another frame submitted at the same time is paced according to the field
// trial setting.
int64_t now_ms = clock.TimeInMilliseconds();
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
auto now = clock.CurrentTime();
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
kMinPacingMs);
kMinPacing);
// If there's a burst of frames, the wait time is calculated based on next
// decode time.
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
kMinPacingMs);
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
kMinPacing);
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
kMinPacingMs);
kMinPacing);
// Allow a few ms to pass, this should be subtracted from the MaxWaitingTime.
constexpr int64_t kTwoMs = 2;
clock.AdvanceTimeMilliseconds(kTwoMs);
now_ms = clock.TimeInMilliseconds();
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
constexpr TimeDelta kTwoMs = TimeDelta::Millis(2);
clock.AdvanceTime(kTwoMs);
now = clock.CurrentTime();
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
kMinPacingMs - kTwoMs);
kMinPacing - kTwoMs);
// A frame is decoded at the current time, the wait time should be restored to
// pacing delay.
timing.SetLastDecodeScheduledTimestamp(now_ms);
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
timing.SetLastDecodeScheduledTimestamp(now);
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
kMinPacingMs);
kMinPacing);
}
TEST(ReceiverTimingTest, DefaultMaxWaitingTimeUnaffectedByPacingExperiment) {
@ -219,65 +227,65 @@ TEST(ReceiverTimingTest, DefaultMaxWaitingTimeUnaffectedByPacingExperiment) {
test::ScopedFieldTrials override_field_trials(
"WebRTC-ZeroPlayoutDelay/min_pacing:3ms/");
constexpr int64_t kStartTimeUs = 3.15e13; // About one year in us.
constexpr int64_t kTimeDeltaMs = 1000.0 / 60.0;
const TimeDelta kTimeDelta = TimeDelta::Millis(1000.0 / 60.0);
SimulatedClock clock(kStartTimeUs);
VCMTiming timing(&clock);
timing.Reset();
clock.AdvanceTimeMilliseconds(kTimeDeltaMs);
int64_t now_ms = clock.TimeInMilliseconds();
int64_t render_time_ms = now_ms + 30;
clock.AdvanceTime(kTimeDelta);
auto now = clock.CurrentTime();
Timestamp render_time = now + TimeDelta::Millis(30);
// Estimate the internal processing delay from the first frame.
int64_t estimated_processing_delay =
(render_time_ms - now_ms) -
timing.MaxWaitingTime(render_time_ms, now_ms,
TimeDelta estimated_processing_delay =
(render_time - now) -
timing.MaxWaitingTime(render_time, now,
/*too_many_frames_queued=*/false);
EXPECT_GT(estimated_processing_delay, 0);
EXPECT_GT(estimated_processing_delay, TimeDelta::Zero());
// Any other frame submitted at the same time should be scheduled according to
// its render time.
for (int i = 0; i < 5; ++i) {
render_time_ms += kTimeDeltaMs;
EXPECT_EQ(timing.MaxWaitingTime(render_time_ms, now_ms,
render_time += kTimeDelta;
EXPECT_EQ(timing.MaxWaitingTime(render_time, now,
/*too_many_frames_queued=*/false),
render_time_ms - now_ms - estimated_processing_delay);
render_time - now - estimated_processing_delay);
}
}
TEST(ReceiverTiminTest, MaxWaitingTimeReturnsZeroIfTooManyFramesQueuedIsTrue) {
TEST(ReceiverTimingTest, MaxWaitingTimeReturnsZeroIfTooManyFramesQueuedIsTrue) {
// The minimum pacing is enabled by a field trial and active if the RTP
// playout delay header extension is set to min==0.
constexpr int64_t kMinPacingMs = 3;
constexpr TimeDelta kMinPacing = TimeDelta::Millis(3);
test::ScopedFieldTrials override_field_trials(
"WebRTC-ZeroPlayoutDelay/min_pacing:3ms/");
constexpr int64_t kStartTimeUs = 3.15e13; // About one year in us.
constexpr int64_t kTimeDeltaMs = 1000.0 / 60.0;
constexpr int64_t kZeroRenderTimeMs = 0;
const TimeDelta kTimeDelta = TimeDelta::Millis(1000.0 / 60.0);
constexpr auto kZeroRenderTime = Timestamp::Zero();
SimulatedClock clock(kStartTimeUs);
VCMTiming timing(&clock);
timing.Reset();
// MaxWaitingTime() returns zero for evenly spaced video frames.
for (int i = 0; i < 10; ++i) {
clock.AdvanceTimeMilliseconds(kTimeDeltaMs);
int64_t now_ms = clock.TimeInMilliseconds();
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
clock.AdvanceTime(kTimeDelta);
auto now = clock.CurrentTime();
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTime, now,
/*too_many_frames_queued=*/false),
0);
timing.SetLastDecodeScheduledTimestamp(now_ms);
TimeDelta::Zero());
timing.SetLastDecodeScheduledTimestamp(now);
}
// Another frame submitted at the same time is paced according to the field
// trial setting.
int64_t now_ms = clock.TimeInMilliseconds();
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
auto now_ms = clock.CurrentTime();
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTime, now_ms,
/*too_many_frames_queued=*/false),
kMinPacingMs);
kMinPacing);
// MaxWaitingTime returns 0 even if there's a burst of frames if
// too_many_frames_queued is set to true.
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTime, now_ms,
/*too_many_frames_queued=*/true),
0);
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTimeMs, now_ms,
TimeDelta::Zero());
EXPECT_EQ(timing.MaxWaitingTime(kZeroRenderTime, now_ms,
/*too_many_frames_queued=*/true),
0);
TimeDelta::Zero());
}
} // namespace webrtc

@ -203,8 +203,9 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
}
// If this frame was too late, we should adjust the delay accordingly
_timing->UpdateCurrentDelay(frame->RenderTimeMs(),
clock_->TimeInMilliseconds());
if (frame->RenderTimeMs() > 0)
_timing->UpdateCurrentDelay(Timestamp::Millis(frame->RenderTimeMs()),
clock_->CurrentTime());
if (first_frame_received_()) {
RTC_LOG(LS_INFO) << "Received first complete decodable video frame";

@ -17,4 +17,6 @@ rtc_library("timestamp_extrapolator") {
"timestamp_extrapolator.cc",
"timestamp_extrapolator.h",
]
deps = [ "../../api/units:timestamp" ]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}

View File

@ -12,10 +12,13 @@
#include <algorithm>
#include "absl/types/optional.h"
namespace webrtc {
TimestampExtrapolator::TimestampExtrapolator(int64_t start_ms)
: _startMs(0),
TimestampExtrapolator::TimestampExtrapolator(Timestamp start)
: _start(Timestamp::Zero()),
_prev(Timestamp::Zero()),
_firstTimestamp(0),
_wrapArounds(0),
_prevUnwrappedTimestamp(-1),
@ -30,12 +33,12 @@ TimestampExtrapolator::TimestampExtrapolator(int64_t start_ms)
_accDrift(6600), // in timestamp ticks, i.e. 15 ms
_accMaxError(7000),
_pP11(1e10) {
Reset(start_ms);
Reset(start);
}
void TimestampExtrapolator::Reset(int64_t start_ms) {
_startMs = start_ms;
_prevMs = _startMs;
void TimestampExtrapolator::Reset(Timestamp start) {
_start = start;
_prev = _start;
_firstTimestamp = 0;
_w[0] = 90.0;
_w[1] = 0;
@ -51,17 +54,18 @@ void TimestampExtrapolator::Reset(int64_t start_ms) {
_detectorAccumulatorNeg = 0;
}
void TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz) {
if (tMs - _prevMs > 10e3) {
void TimestampExtrapolator::Update(Timestamp now, uint32_t ts90khz) {
if (now - _prev > TimeDelta::Seconds(10)) {
// Ten seconds without a complete frame.
// Reset the extrapolator
Reset(tMs);
Reset(now);
} else {
_prevMs = tMs;
_prev = now;
}
// Remove offset to prevent badly scaled matrices
tMs -= _startMs;
const TimeDelta offset = now - _start;
double tMs = offset.ms();
CheckForWrapArounds(ts90khz);
@ -79,7 +83,7 @@ void TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz) {
}
double residual = (static_cast<double>(unwrapped_ts90khz) - _firstTimestamp) -
static_cast<double>(tMs) * _w[0] - _w[1];
tMs * _w[0] - _w[1];
if (DelayChangeDetection(residual) &&
_packetCount >= _startUpFilterDelayInPackets) {
// A sudden change of average network delay has been detected.
@ -123,32 +127,28 @@ void TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz) {
}
}
int64_t TimestampExtrapolator::ExtrapolateLocalTime(uint32_t timestamp90khz) {
int64_t localTimeMs = 0;
absl::optional<Timestamp> TimestampExtrapolator::ExtrapolateLocalTime(
uint32_t timestamp90khz) {
CheckForWrapArounds(timestamp90khz);
double unwrapped_ts90khz =
static_cast<double>(timestamp90khz) +
_wrapArounds * ((static_cast<int64_t>(1) << 32) - 1);
if (_packetCount == 0) {
localTimeMs = -1;
return absl::nullopt;
} else if (_packetCount < _startUpFilterDelayInPackets) {
localTimeMs =
_prevMs +
static_cast<int64_t>(
static_cast<double>(unwrapped_ts90khz - _prevUnwrappedTimestamp) /
90.0 +
0.5);
auto diffMs = static_cast<int64_t>(
static_cast<double>(unwrapped_ts90khz - _prevUnwrappedTimestamp) /
90.0 +
0.5);
return _prev + TimeDelta::Millis(diffMs);
} else if (_w[0] < 1e-3) {
return _start;
} else {
if (_w[0] < 1e-3) {
localTimeMs = _startMs;
} else {
double timestampDiff =
unwrapped_ts90khz - static_cast<double>(_firstTimestamp);
localTimeMs = static_cast<int64_t>(static_cast<double>(_startMs) +
(timestampDiff - _w[1]) / _w[0] + 0.5);
}
double timestampDiff =
unwrapped_ts90khz - static_cast<double>(_firstTimestamp);
auto diffMs = static_cast<int64_t>((timestampDiff - _w[1]) / _w[0] + 0.5);
return _start + TimeDelta::Millis(diffMs);
}
return localTimeMs;
}
// Investigates if the timestamp clock has overflowed since the last timestamp
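The wrap-around handling referenced in the comment above is not part of the shown hunk. As background, a sketch of the common idiom for unwrapping a 32-bit 90 kHz RTP timestamp follows; the function and parameter names (Unwrap90kHz, prev_unwrapped) are hypothetical and this is not the literal CheckForWrapArounds implementation:

#include <cstdint>

// Sketch: interpreting the 32-bit difference as signed makes a jump from a
// value near 2^32 back to a small value come out as a small positive delta,
// so the unwrapped counter keeps increasing across wrap-arounds.
int64_t Unwrap90kHz(uint32_t ts, uint32_t prev_ts, int64_t prev_unwrapped) {
  const int32_t delta = static_cast<int32_t>(ts - prev_ts);
  return prev_unwrapped + delta;
}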

View File

@ -13,23 +13,26 @@
#include <stdint.h>
#include "absl/types/optional.h"
#include "api/units/timestamp.h"
namespace webrtc {
// Not thread safe.
class TimestampExtrapolator {
public:
explicit TimestampExtrapolator(int64_t start_ms);
void Update(int64_t tMs, uint32_t ts90khz);
int64_t ExtrapolateLocalTime(uint32_t timestamp90khz);
void Reset(int64_t start_ms);
explicit TimestampExtrapolator(Timestamp start);
void Update(Timestamp now, uint32_t ts90khz);
absl::optional<Timestamp> ExtrapolateLocalTime(uint32_t timestamp90khz);
void Reset(Timestamp start);
private:
void CheckForWrapArounds(uint32_t ts90khz);
bool DelayChangeDetection(double error);
double _w[2];
double _pP[2][2];
int64_t _startMs;
int64_t _prevMs;
Timestamp _start;
Timestamp _prev;
uint32_t _firstTimestamp;
int32_t _wrapArounds;
int64_t _prevUnwrappedTimestamp;
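Taken together with the .cc change above, a short usage sketch of the reworked interface; the clock setup and the 90 kHz timestamp values are illustrative, and the usual webrtc/absl includes are assumed:

SimulatedClock clock(/*initial_time_us=*/0);
TimestampExtrapolator extrapolator(clock.CurrentTime());

// Feed (local arrival time, RTP timestamp) pairs as frames arrive.
extrapolator.Update(clock.CurrentTime(), /*ts90khz=*/90000);

// Ask for the estimated local time of a later RTP timestamp; nullopt is
// returned until at least one packet has been seen.
absl::optional<Timestamp> local_time =
    extrapolator.ExtrapolateLocalTime(/*timestamp90khz=*/93000);
if (local_time) {
  // Use *local_time for render-time calculations.
}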

View File

@ -148,7 +148,7 @@ struct FrameMetadata {
contentType(frame.contentType()),
delayed_by_retransmission(frame.delayed_by_retransmission()),
rtp_timestamp(frame.Timestamp()),
receive_time(frame.ReceivedTime()) {}
receive_time_ms(frame.ReceivedTime()) {}
const bool is_last_spatial_layer;
const bool is_keyframe;
@ -156,7 +156,7 @@ struct FrameMetadata {
const VideoContentType contentType;
const bool delayed_by_retransmission;
const uint32_t rtp_timestamp;
const int64_t receive_time;
const int64_t receive_time_ms;
};
// Encapsulates use of the new frame buffer for use in VideoReceiveStream. This
@ -247,9 +247,12 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
if (complete_units < buffer_->GetTotalNumberOfContinuousTemporalUnits()) {
stats_proxy_->OnCompleteFrame(metadata.is_keyframe, metadata.size,
metadata.contentType);
if (!metadata.delayed_by_retransmission)
RTC_DCHECK_GE(metadata.receive_time_ms, 0)
<< "Frame receive time must be positive for received frames, was "
<< metadata.receive_time_ms << ".";
if (!metadata.delayed_by_retransmission && metadata.receive_time_ms >= 0)
timing_->IncomingTimestamp(metadata.rtp_timestamp,
metadata.receive_time);
Timestamp::Millis(metadata.receive_time_ms));
MaybeScheduleFrameForRelease();
}
@ -293,7 +296,7 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
timeout_tracker_.OnEncodedFrameReleased();
int64_t now_ms = clock_->TimeInMilliseconds();
Timestamp now = clock_->CurrentTime();
bool superframe_delayed_by_retransmission = false;
size_t superframe_size = 0;
const EncodedFrame& first_frame = *frames.front();
@ -303,12 +306,11 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
keyframe_required_ = false;
// Gracefully handle bad RTP timestamps and render time issues.
if (FrameHasBadRenderTiming(render_time.ms(), now_ms,
if (FrameHasBadRenderTiming(render_time, now,
timing_->TargetVideoDelay())) {
jitter_estimator_.Reset();
timing_->Reset();
render_time = Timestamp::Millis(
timing_->RenderTimeMs(first_frame.Timestamp(), now_ms));
render_time = timing_->RenderTime(first_frame.Timestamp(), now);
}
for (std::unique_ptr<EncodedFrame>& frame : frames) {
@ -334,9 +336,9 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
rtt_mult = rtt_mult_settings_->rtt_mult_setting;
rtt_mult_add_cap_ms = rtt_mult_settings_->rtt_mult_add_cap_ms;
}
timing_->SetJitterDelay(
jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms));
timing_->UpdateCurrentDelay(render_time.ms(), now_ms);
timing_->SetJitterDelay(TimeDelta::Millis(
jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms)));
timing_->UpdateCurrentDelay(render_time, now);
} else if (RttMultExperiment::RttMultEnabled()) {
jitter_estimator_.FrameNacked();
}
@ -349,7 +351,7 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
std::unique_ptr<EncodedFrame> frame =
CombineAndDeleteFrames(std::move(frames));
timing_->SetLastDecodeScheduledTimestamp(now_ms);
timing_->SetLastDecodeScheduledTimestamp(now);
decoder_ready_for_new_frame_ = false;
// VideoReceiveStream2 wants frames on the decoder thread.
@ -399,18 +401,18 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
}
void UpdateJitterDelay() {
int max_decode_ms;
int current_delay_ms;
int target_delay_ms;
int jitter_buffer_ms;
int min_playout_delay_ms;
int render_delay_ms;
if (timing_->GetTimings(&max_decode_ms, &current_delay_ms, &target_delay_ms,
&jitter_buffer_ms, &min_playout_delay_ms,
&render_delay_ms)) {
TimeDelta max_decode = TimeDelta::Zero();
TimeDelta current_delay = TimeDelta::Zero();
TimeDelta target_delay = TimeDelta::Zero();
TimeDelta jitter_buffer = TimeDelta::Zero();
TimeDelta min_playout_delay = TimeDelta::Zero();
TimeDelta render_delay = TimeDelta::Zero();
if (timing_->GetTimings(&max_decode, &current_delay, &target_delay,
&jitter_buffer, &min_playout_delay,
&render_delay)) {
stats_proxy_->OnFrameBufferTimingsUpdated(
max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms,
min_playout_delay_ms, render_delay_ms);
max_decode.ms(), current_delay.ms(), target_delay.ms(),
jitter_buffer.ms(), min_playout_delay.ms(), render_delay.ms());
}
}
@ -437,8 +439,8 @@ class FrameBuffer3Proxy : public FrameBufferProxy {
}
// Found keyframe - decode right away.
if (next_frame.front()->is_keyframe()) {
auto render_time = Timestamp::Millis(timing_->RenderTimeMs(
next_frame.front()->Timestamp(), clock_->TimeInMilliseconds()));
auto render_time = timing_->RenderTime(next_frame.front()->Timestamp(),
clock_->CurrentTime());
OnFrameReady(std::move(next_frame), render_time);
return;
}

View File

@ -233,7 +233,7 @@ class FrameBufferProxyFixture
kMaxWaitForFrame,
&decode_sync_)) {
// Avoid starting with negative render times.
timing_.set_min_playout_delay(10);
timing_.set_min_playout_delay(TimeDelta::Millis(10));
ON_CALL(stats_callback_, OnDroppedFrames)
.WillByDefault(
@ -631,8 +631,7 @@ TEST_P(FrameBufferProxyTest, TestStatsCallback) {
EXPECT_CALL(stats_callback_, OnFrameBufferTimingsUpdated);
// Fake timing having received decoded frame.
timing_.StopDecodeTimer(clock_->TimeInMicroseconds() + 1,
clock_->TimeInMilliseconds());
timing_.StopDecodeTimer(TimeDelta::Millis(1), clock_->CurrentTime());
StartNextDecodeForceKeyframe();
proxy_->InsertFrame(Builder().Id(0).Time(0).AsLast().Build());
EXPECT_THAT(WaitForFrameOrTimeout(TimeDelta::Zero()), Frame(WithId(0)));
@ -775,8 +774,8 @@ TEST_P(LowLatencyFrameBufferProxyTest,
FramesDecodedInstantlyWithLowLatencyRendering) {
// Initial keyframe.
StartNextDecodeForceKeyframe();
timing_.set_min_playout_delay(0);
timing_.set_max_playout_delay(10);
timing_.set_min_playout_delay(TimeDelta::Zero());
timing_.set_max_playout_delay(TimeDelta::Millis(10));
auto frame = Builder().Id(0).Time(0).AsLast().Build();
// Playout delay of 0 implies low-latency rendering.
frame->SetPlayoutDelay({0, 10});
@ -798,8 +797,8 @@ TEST_P(LowLatencyFrameBufferProxyTest,
TEST_P(LowLatencyFrameBufferProxyTest, ZeroPlayoutDelayFullQueue) {
// Initial keyframe.
StartNextDecodeForceKeyframe();
timing_.set_min_playout_delay(0);
timing_.set_max_playout_delay(10);
timing_.set_min_playout_delay(TimeDelta::Zero());
timing_.set_max_playout_delay(TimeDelta::Millis(10));
auto frame = Builder().Id(0).Time(0).AsLast().Build();
// Playout delay of 0 implies low-latency rendering.
frame->SetPlayoutDelay({0, 10});
@ -822,8 +821,8 @@ TEST_P(LowLatencyFrameBufferProxyTest, ZeroPlayoutDelayFullQueue) {
TEST_P(LowLatencyFrameBufferProxyTest, MinMaxDelayZeroLowLatencyMode) {
// Initial keyframe.
StartNextDecodeForceKeyframe();
timing_.set_min_playout_delay(0);
timing_.set_max_playout_delay(0);
timing_.set_min_playout_delay(TimeDelta::Zero());
timing_.set_max_playout_delay(TimeDelta::Zero());
auto frame = Builder().Id(0).Time(0).AsLast().Build();
// Playout delay of 0 implies low-latency rendering.
frame->SetPlayoutDelay({0, 0});
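The low-latency path exercised by these tests hinges on the frame-level playout delay. A condensed sketch of the setup, reusing only calls that appear in the tests above (the Builder helper and proxy_ belong to the test fixture; the id/time values are arbitrary):

// min_playout_delay == 0 with max_playout_delay > 0 selects the low-latency
// rendering path; {0, 0} additionally allows frames to be decoded as soon as
// they are complete.
timing_.set_min_playout_delay(TimeDelta::Zero());
timing_.set_max_playout_delay(TimeDelta::Millis(10));

auto frame = Builder().Id(0).Time(0).AsLast().Build();
frame->SetPlayoutDelay({0, 10});  // min=0 signals low latency to the receiver.
proxy_->InsertFrame(std::move(frame));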

View File

@ -30,10 +30,9 @@ FrameDecodeTiming::OnFrameBufferUpdated(uint32_t next_temporal_unit_rtp,
uint32_t last_temporal_unit_rtp,
bool too_many_frames_queued) {
const Timestamp now = clock_->CurrentTime();
Timestamp render_time = Timestamp::Millis(
timing_->RenderTimeMs(next_temporal_unit_rtp, now.ms()));
TimeDelta max_wait = TimeDelta::Millis(timing_->MaxWaitingTime(
render_time.ms(), now.ms(), too_many_frames_queued));
Timestamp render_time = timing_->RenderTime(next_temporal_unit_rtp, now);
TimeDelta max_wait =
timing_->MaxWaitingTime(render_time, now, too_many_frames_queued);
// If the delay is not too far in the past, or this is the last decodable
// frame then it is the best frame to be decoded. Otherwise, fast-forward
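The comment above is cut off by the hunk boundary; a hedged sketch of the scheduling rule it describes follows. The threshold name kMaxAllowedFrameDelay and the clamping step are illustrative assumptions, not quoted from the file; max_wait, render_time, and the RTP arguments are the values computed above:

// Sketch: fast-forward past a temporal unit whose scheduled wait is already
// far in the past, unless it is the last unit that can currently be decoded.
if (max_wait < -kMaxAllowedFrameDelay &&
    next_temporal_unit_rtp != last_temporal_unit_rtp) {
  return absl::nullopt;  // Skip; the caller advances to a newer unit.
}
// Otherwise this is the unit to decode next; never schedule it in the past.
Timestamp latest_decode_time = now + std::max(max_wait, TimeDelta::Zero());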

View File

@ -32,20 +32,18 @@ class FakeVCMTiming : public webrtc::VCMTiming {
public:
explicit FakeVCMTiming(Clock* clock) : webrtc::VCMTiming(clock) {}
int64_t RenderTimeMs(uint32_t frame_timestamp,
int64_t now_ms) const override {
Timestamp RenderTime(uint32_t frame_timestamp, Timestamp now) const override {
RTC_DCHECK(render_time_map_.contains(frame_timestamp));
auto it = render_time_map_.find(frame_timestamp);
return it->second.ms();
return it->second;
}
int64_t MaxWaitingTime(int64_t render_time_ms,
int64_t now_ms,
bool too_many_frames_queued) const override {
auto render_time = Timestamp::Millis(render_time_ms);
TimeDelta MaxWaitingTime(Timestamp render_time,
Timestamp now,
bool too_many_frames_queued) const override {
RTC_DCHECK(wait_time_map_.contains(render_time));
auto it = wait_time_map_.find(render_time);
return it->second.ms();
return it->second;
}
void SetTimes(uint32_t frame_timestamp,

View File

@ -269,7 +269,7 @@ VideoReceiveStream2::VideoReceiveStream2(
decoder_payload_types.insert(decoder.payload_type);
}
timing_->set_render_delay(config_.render_delay_ms);
timing_->set_render_delay(TimeDelta::Millis(config_.render_delay_ms));
frame_buffer_ = FrameBufferProxy::CreateFromFieldTrial(
clock_, call_->worker_thread(), timing_.get(), &stats_proxy_,
@ -710,7 +710,7 @@ absl::optional<Syncable::Info> VideoReceiveStream2::GetInfo() const {
if (!info)
return absl::nullopt;
info->current_delay_ms = timing_->TargetVideoDelay();
info->current_delay_ms = timing_->TargetVideoDelay().ms();
return info;
}
@ -970,7 +970,7 @@ void VideoReceiveStream2::UpdatePlayoutDelays() const {
std::max({frame_minimum_playout_delay_ms_, base_minimum_playout_delay_ms_,
syncable_minimum_playout_delay_ms_});
if (minimum_delay_ms >= 0) {
timing_->set_min_playout_delay(minimum_delay_ms);
timing_->set_min_playout_delay(TimeDelta::Millis(minimum_delay_ms));
if (frame_minimum_playout_delay_ms_ == 0 &&
frame_maximum_playout_delay_ms_ > 0 && low_latency_renderer_enabled_) {
// TODO(kron): Estimate frame rate from video stream.
@ -991,7 +991,7 @@ void VideoReceiveStream2::UpdatePlayoutDelays() const {
const int maximum_delay_ms = frame_maximum_playout_delay_ms_;
if (maximum_delay_ms >= 0) {
timing_->set_max_playout_delay(maximum_delay_ms);
timing_->set_max_playout_delay(TimeDelta::Millis(maximum_delay_ms));
}
}

View File

@ -163,30 +163,30 @@ TEST_F(VideoReceiveStream2Test, PlayoutDelay) {
test_frame->SetPlayoutDelay(kPlayoutDelayMs);
video_receive_stream_->OnCompleteFrame(std::move(test_frame));
EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay().ms());
EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay().ms());
// Check that the biggest minimum delay is chosen.
video_receive_stream_->SetMinimumPlayoutDelay(400);
EXPECT_EQ(400, timing_->min_playout_delay());
EXPECT_EQ(400, timing_->min_playout_delay().ms());
// Check base minimum delay validation.
EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(12345));
EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(-1));
EXPECT_TRUE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(500));
EXPECT_EQ(500, timing_->min_playout_delay());
EXPECT_EQ(500, timing_->min_playout_delay().ms());
// Check that intermediate values are remembered and the biggest remembered
// is chosen.
video_receive_stream_->SetBaseMinimumPlayoutDelayMs(0);
EXPECT_EQ(400, timing_->min_playout_delay());
EXPECT_EQ(400, timing_->min_playout_delay().ms());
video_receive_stream_->SetMinimumPlayoutDelay(0);
EXPECT_EQ(123, timing_->min_playout_delay());
EXPECT_EQ(123, timing_->min_playout_delay().ms());
}
TEST_F(VideoReceiveStream2Test, PlayoutDelayPreservesDefaultMaxValue) {
const int default_max_playout_latency = timing_->max_playout_delay();
const TimeDelta default_max_playout_latency = timing_->max_playout_delay();
const VideoPlayoutDelay kPlayoutDelayMs = {123, -1};
std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
@ -196,13 +196,13 @@ TEST_F(VideoReceiveStream2Test, PlayoutDelayPreservesDefaultMaxValue) {
video_receive_stream_->OnCompleteFrame(std::move(test_frame));
// Ensure that -1 preserves default maximum value from `timing_`.
EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
EXPECT_NE(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay().ms());
EXPECT_NE(kPlayoutDelayMs.max_ms, timing_->max_playout_delay().ms());
EXPECT_EQ(default_max_playout_latency, timing_->max_playout_delay());
}
TEST_F(VideoReceiveStream2Test, PlayoutDelayPreservesDefaultMinValue) {
const int default_min_playout_latency = timing_->min_playout_delay();
const TimeDelta default_min_playout_latency = timing_->min_playout_delay();
const VideoPlayoutDelay kPlayoutDelayMs = {-1, 321};
std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
@ -212,8 +212,8 @@ TEST_F(VideoReceiveStream2Test, PlayoutDelayPreservesDefaultMinValue) {
video_receive_stream_->OnCompleteFrame(std::move(test_frame));
// Ensure that -1 preserves default minimum value from `timing_`.
EXPECT_NE(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
EXPECT_NE(kPlayoutDelayMs.min_ms, timing_->min_playout_delay().ms());
EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay().ms());
EXPECT_EQ(default_min_playout_latency, timing_->min_playout_delay());
}

View File

@ -70,11 +70,11 @@ void VideoStreamDecoderImpl::OnFrame(std::unique_ptr<EncodedFrame> frame) {
}
void VideoStreamDecoderImpl::SetMinPlayoutDelay(TimeDelta min_delay) {
timing_.set_min_playout_delay(min_delay.ms());
timing_.set_min_playout_delay(min_delay);
}
void VideoStreamDecoderImpl::SetMaxPlayoutDelay(TimeDelta max_delay) {
timing_.set_max_playout_delay(max_delay.ms());
timing_.set_max_playout_delay(max_delay);
}
VideoDecoder* VideoStreamDecoderImpl::GetDecoder(int payload_type) {
@ -258,7 +258,8 @@ void VideoStreamDecoderImpl::OnDecodedFrameCallback(
Timestamp::Millis(frame_info->decode_start_time_ms +
*decode_time_ms)});
decoded_image.set_timestamp_us(frame_info->render_time_us);
timing_.StopDecodeTimer(*decode_time_ms, decode_stop_time_ms);
timing_.StopDecodeTimer(TimeDelta::Millis(*decode_time_ms),
Timestamp::Millis(decode_stop_time_ms));
callbacks_->OnDecodedFrame(decoded_image, callback_info);
});