Make frame buffer not drop frames unnecessarily

VCMTiming::MaxWaitingTime no longer clips negative values to zero. This
lets the frame buffer distinguish between frames that are genuinely late
and cases where the waiting cycle was simply interrupted by a newly
inserted frame right before the waiting timer would have expired.

Bug: webrtc:8917
Change-Id: I6b253f459fcb3a346064a103cc92ee332b074e1b
Reviewed-on: https://webrtc-review.googlesource.com/57741
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22210}
Author: Ilya Nikolaevskiy <ilnik@webrtc.org>
Date: 2018-02-27 15:49:47 +01:00
Committed by: Commit Bot
Parent: 99a2c5dcb6
Commit: 8c4fe16e4c
5 changed files with 20 additions and 16 deletions
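
To make the behavioral difference concrete, here is a minimal, self-contained C++ sketch (illustrative only: the free functions and numbers below are simplified stand-ins for the VCMTiming logic, not the actual WebRTC code). With the old clipped return value, a frame woken up a couple of milliseconds past its deadline is indistinguishable from one that is hopelessly late; with the signed value the caller can tell them apart.

#include <cstdint>
#include <initializer_list>
#include <iostream>

// Pre-change semantics: lateness is clipped away, so "barely past the
// deadline" and "hopelessly late" both come back as 0.
int64_t MaxWaitingTimeClipped(int64_t render_time_ms, int64_t now_ms,
                              int64_t decode_ms, int64_t render_delay_ms) {
  int64_t wait_ms = render_time_ms - now_ms - decode_ms - render_delay_ms;
  return wait_ms < 0 ? 0 : wait_ms;
}

// Post-change semantics: the value may go negative, telling the caller how
// far past the decode deadline the frame already is.
int64_t MaxWaitingTimeSigned(int64_t render_time_ms, int64_t now_ms,
                             int64_t decode_ms, int64_t render_delay_ms) {
  return render_time_ms - now_ms - decode_ms - render_delay_ms;
}

int main() {
  // A frame scheduled for rendering at t=100 ms, with 5 ms decode time and
  // 5 ms render delay, must start decoding by t=90 ms.
  for (int64_t now_ms : {92, 120}) {  // 2 ms vs. 30 ms past that deadline
    std::cout << "now=" << now_ms
              << " clipped=" << MaxWaitingTimeClipped(100, now_ms, 5, 5)
              << " signed=" << MaxWaitingTimeSigned(100, now_ms, 5, 5) << "\n";
  }
  // Prints clipped=0 for both frames, but signed=-2 vs. signed=-30: only the
  // signed value shows that the first frame is still worth rendering.
  return 0;
}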

View File

@@ -34,6 +34,10 @@ constexpr int kMaxFramesBuffered = 600;
 // Max number of decoded frame info that will be saved.
 constexpr int kMaxFramesHistory = 50;
+// The time it's allowed for a frame to be late to its rendering prediction and
+// still be rendered.
+constexpr int kMaxAllowedFrameDalayMs = 5;
+
 constexpr int64_t kLogNonDecodedIntervalMs = 5000;
 }  // namespace
@@ -119,7 +123,9 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
         // This will cause the frame buffer to prefer high framerate rather
         // than high resolution in the case of the decoder not decoding fast
         // enough and the stream has multiple spatial and temporal layers.
-        if (wait_ms == 0)
+        // For multiple temporal layers it may cause non-base layer frames to be
+        // skipped if they are late.
+        if (wait_ms < -kMaxAllowedFrameDalayMs)
          continue;

        break;

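A small sketch of the drop decision introduced above (an assumed simplification, not the real NextFrame wait loop): a frame is only skipped in favor of a newer, already-decodable one when it is more than kMaxAllowedFrameDalayMs past its deadline, so a wait that was merely interrupted just before the timer expired no longer causes a drop.

#include <cassert>
#include <cstdint>

// Same name and value as the constant added in this commit.
constexpr int kMaxAllowedFrameDalayMs = 5;

// Simplified stand-in for the check in FrameBuffer::NextFrame.
bool ShouldSkipLateFrame(int64_t wait_ms) {
  return wait_ms < -kMaxAllowedFrameDalayMs;
}

int main() {
  assert(!ShouldSkipLateFrame(2));   // still has time left: not skipped
  assert(!ShouldSkipLateFrame(-3));  // only 3 ms late: still rendered
  assert(ShouldSkipLateFrame(-20));  // clearly late: prefer the newer frame
  // With the old clipped value and the previous `wait_ms == 0` test, the
  // middle case would also have been treated as late and dropped.
  return 0;
}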
View File

@@ -52,9 +52,9 @@ class VCMTimingFake : public VCMTiming {
    return last_ms_;
  }

-  uint32_t MaxWaitingTime(int64_t render_time_ms,
-                          int64_t now_ms) const override {
-    return std::max<int>(0, render_time_ms - now_ms - kDecodeTime);
+  int64_t MaxWaitingTime(int64_t render_time_ms,
+                         int64_t now_ms) const override {
+    return render_time_ms - now_ms - kDecodeTime;
  }

  bool GetTimings(int* decode_ms,
@@ -359,7 +359,7 @@ TEST_F(TestFrameBuffer2, DropTemporalLayerSlowDecoder) {
  for (int i = 0; i < 10; ++i) {
    ExtractFrame();
-    clock_.AdvanceTimeMilliseconds(60);
+    clock_.AdvanceTimeMilliseconds(70);
  }

  CheckFrame(0, pid, 0);
@@ -388,10 +388,10 @@ TEST_F(TestFrameBuffer2, DropSpatialLayerSlowDecoder) {
  ExtractFrame();
  ExtractFrame();
-  clock_.AdvanceTimeMilliseconds(55);
+  clock_.AdvanceTimeMilliseconds(57);
  for (int i = 2; i < 12; ++i) {
    ExtractFrame();
-    clock_.AdvanceTimeMilliseconds(55);
+    clock_.AdvanceTimeMilliseconds(57);
  }

  CheckFrame(0, pid, 0);

View File

@@ -20,6 +20,7 @@
 #include "modules/video_coding/internal_defines.h"
 #include "modules/video_coding/media_opt_util.h"
 #include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
 #include "rtc_base/trace_event.h"
 #include "system_wrappers/include/clock.h"
@@ -192,8 +193,8 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms,
        static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
    uint16_t new_max_wait_time =
        static_cast<uint16_t>(VCM_MAX(available_wait_time, 0));
-    uint32_t wait_time_ms =
-        timing_->MaxWaitingTime(render_time_ms, clock_->TimeInMilliseconds());
+    uint32_t wait_time_ms = rtc::saturated_cast<uint32_t>(
+        timing_->MaxWaitingTime(render_time_ms, clock_->TimeInMilliseconds()));
    if (new_max_wait_time < wait_time_ms) {
      // We're not allowed to wait until the frame is supposed to be rendered,
      // waiting as long as we're allowed to avoid busy looping, and then return

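Because MaxWaitingTime can now return a negative value, the receiver converts it back to an unsigned wait with rtc::saturated_cast, which saturates at the bounds of the target type instead of wrapping. A rough standalone equivalent of what that clamping does at this call site (it deliberately avoids the WebRTC helper so it compiles on its own):

#include <cstdint>
#include <iostream>

// Approximates rtc::saturated_cast<uint32_t>(x) for this call site: negative
// wait times become 0 instead of wrapping around to a huge unsigned value.
uint32_t SaturateToUint32(int64_t value_ms) {
  if (value_ms < 0) return 0;
  if (value_ms > UINT32_MAX) return UINT32_MAX;
  return static_cast<uint32_t>(value_ms);
}

int main() {
  std::cout << SaturateToUint32(25) << "\n";   // 25: time left to wait
  std::cout << SaturateToUint32(-20) << "\n";  // 0: the frame is already late
  // A plain static_cast<uint32_t>(-20) would instead yield 4294967276 ms and
  // effectively make the receiver wait forever.
  std::cout << static_cast<uint32_t>(int64_t{-20}) << "\n";
  return 0;
}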
View File

@@ -234,17 +234,14 @@ int VCMTiming::RequiredDecodeTimeMs() const {
  return decode_time_ms;
}

-uint32_t VCMTiming::MaxWaitingTime(int64_t render_time_ms,
-                                   int64_t now_ms) const {
+int64_t VCMTiming::MaxWaitingTime(int64_t render_time_ms,
+                                  int64_t now_ms) const {
  rtc::CritScope cs(&crit_sect_);
  const int64_t max_wait_time_ms =
      render_time_ms - now_ms - RequiredDecodeTimeMs() - render_delay_ms_;
-  if (max_wait_time_ms < 0) {
-    return 0;
-  }
-  return static_cast<uint32_t>(max_wait_time_ms);
+  return max_wait_time_ms;
}

int VCMTiming::TargetVideoDelay() const {

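The computation itself is unchanged: the maximum waiting time is render_time_ms - now_ms - RequiredDecodeTimeMs() - render_delay_ms_; only the clamping of negative results is removed. With assumed numbers, render_time_ms = 1000, now_ms = 998, a required decode time of 10 ms and a render delay of 10 ms give 1000 - 998 - 10 - 10 = -18: the caller now sees that decoding should have started 18 ms ago, where the old code would have reported 0.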
View File

@@ -78,7 +78,7 @@ class VCMTiming {
  // Returns the maximum time in ms that we can wait for a frame to become
  // complete before we must pass it to the decoder.
-  virtual uint32_t MaxWaitingTime(int64_t render_time_ms, int64_t now_ms) const;
+  virtual int64_t MaxWaitingTime(int64_t render_time_ms, int64_t now_ms) const;

  // Returns the current target delay which is required delay + decode time +
  // render delay.