Remove unused functions in VCMTiming.
Remove VCMTiming::EnoughTimeToDecode and VCMTiming::ResetDecodeTime.
Make VCMTiming::StopDecodeTimer void (it unconditionally returned zero).
Update the ReceiverTiming.WrapAround test to insert a timestamp that
actually wraps.

Bug: none
Change-Id: I85a8bfd6be18371810b638284b4af73a46894be7
Reviewed-on: https://webrtc-review.googlesource.com/36060
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21660}
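For context on the WrapAround update: RTP timestamps are unsigned 32-bit values ticking at 90 kHz, so at 25 fps each frame advances the timestamp by 90000 / 25 = 3600 ticks, and a stream whose first timestamp sits three frame intervals below 0xFFFFFFFF wraps on its fifth frame. A minimal standalone sketch of that arithmetic (illustration only, not part of this change):

#include <cstdint>
#include <cstdio>

int main() {
  const int kFps = 25;  // 90000 / 25 = 3600 ticks per frame.
  // Start three frame intervals below the 32-bit maximum, as the test does.
  uint32_t timestamp = 0xFFFFFFFFu - 3 * 90000 / kFps;
  for (int i = 1; i <= 5; ++i) {
    printf("frame %d: timestamp %u\n", i, timestamp);
    timestamp += 90000 / kFps;  // Unsigned arithmetic wraps modulo 2^32.
  }
  // Frames 1-4 print 4294956495 ... 4294967295; frame 5 prints 3599,
  // i.e. the timestamp has wrapped.
  return 0;
}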
modules/video_coding/timing.cc
@@ -12,8 +12,6 @@
 
 #include <algorithm>
 
-#include "modules/video_coding/internal_defines.h"
-#include "modules/video_coding/jitter_buffer_common.h"
 #include "system_wrappers/include/clock.h"
 #include "system_wrappers/include/metrics.h"
 #include "system_wrappers/include/timestamp_extrapolator.h"
@@ -86,11 +84,6 @@ void VCMTiming::Reset() {
   prev_frame_timestamp_ = 0;
 }
 
-void VCMTiming::ResetDecodeTime() {
-  rtc::CritScope cs(&crit_sect_);
-  codec_timer_.reset(new VCMCodecTimer());
-}
-
 void VCMTiming::set_render_delay(int render_delay_ms) {
   rtc::CritScope cs(&crit_sect_);
   render_delay_ms_ = render_delay_ms;
@@ -155,9 +148,8 @@ void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) {
   }
 
   if (max_change_ms <= 0) {
-    // Any changes less than 1 ms are truncated and
-    // will be postponed. Negative change will be due
-    // to reordering and should be ignored.
+    // Any changes less than 1 ms are truncated and will be postponed.
+    // Negative change will be due to reordering and should be ignored.
     return;
   }
   delay_diff_ms = std::max(delay_diff_ms, -max_change_ms);
@@ -185,7 +177,7 @@ void VCMTiming::UpdateCurrentDelay(int64_t render_time_ms,
   }
 }
 
-int32_t VCMTiming::StopDecodeTimer(uint32_t time_stamp,
+void VCMTiming::StopDecodeTimer(uint32_t time_stamp,
                                 int32_t decode_time_ms,
                                 int64_t now_ms,
                                 int64_t render_time_ms) {
@@ -204,7 +196,6 @@ int32_t VCMTiming::StopDecodeTimer(uint32_t time_stamp,
     sum_missed_render_deadline_ms_ += -time_until_rendering_ms;
     ++num_delayed_decoded_frames_;
   }
-  return 0;
 }
 
 void VCMTiming::IncomingTimestamp(uint32_t time_stamp, int64_t now_ms) {
@@ -228,7 +219,7 @@ int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
   }
 
   if (min_playout_delay_ms_ == 0 && max_playout_delay_ms_ == 0) {
-    // Render as soon as possible
+    // Render as soon as possible.
     return now_ms;
   }
 
@@ -239,7 +230,6 @@ int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
   return estimated_complete_time_ms + actual_delay;
 }
 
-// Must be called from inside a critical section.
 int VCMTiming::RequiredDecodeTimeMs() const {
   const int decode_time_ms = codec_timer_->RequiredDecodeTimeMs();
   assert(decode_time_ms >= 0);
@@ -259,24 +249,6 @@ uint32_t VCMTiming::MaxWaitingTime(int64_t render_time_ms,
   return static_cast<uint32_t>(max_wait_time_ms);
 }
 
-bool VCMTiming::EnoughTimeToDecode(
-    uint32_t available_processing_time_ms) const {
-  rtc::CritScope cs(&crit_sect_);
-  int64_t required_decode_time_ms = RequiredDecodeTimeMs();
-  if (required_decode_time_ms < 0) {
-    // Haven't decoded any frames yet, try decoding one to get an estimate
-    // of the decode time.
-    return true;
-  } else if (required_decode_time_ms == 0) {
-    // Decode time is less than 1, set to 1 for now since
-    // we don't have any better precision. Count ticks later?
-    required_decode_time_ms = 1;
-  }
-  return static_cast<int64_t>(available_processing_time_ms) -
-             required_decode_time_ms >
-         0;
-}
-
 int VCMTiming::TargetVideoDelay() const {
   rtc::CritScope cs(&crit_sect_);
   return TargetDelayInternal();
modules/video_coding/timing.h
@@ -32,7 +32,6 @@ class VCMTiming {
 
   // Resets the timing to the initial state.
   void Reset();
-  void ResetDecodeTime();
 
   // Set the amount of time needed to render an image. Defaults to 10 ms.
   void set_render_delay(int render_delay_ms);
@@ -41,16 +40,12 @@ class VCMTiming {
   // get the desired jitter buffer level.
   void SetJitterDelay(int required_delay_ms);
 
-  // Set the minimum playout delay from capture to render in ms.
+  // Set/get the minimum playout delay from capture to render in ms.
   void set_min_playout_delay(int min_playout_delay_ms);
-
-  // Returns the minimum playout delay from capture to render in ms.
   int min_playout_delay();
 
-  // Set the maximum playout delay from capture to render in ms.
+  // Set/get the maximum playout delay from capture to render in ms.
   void set_max_playout_delay(int max_playout_delay_ms);
-
-  // Returns the maximum playout delay from capture to render in ms.
   int max_playout_delay();
 
   // Increases or decreases the current delay to get closer to the target delay.
@@ -67,7 +62,7 @@ class VCMTiming {
 
   // Stops the decoder timer, should be called when the decoder returns a frame
   // or when the decoded frame callback is called.
-  int32_t StopDecodeTimer(uint32_t time_stamp,
+  void StopDecodeTimer(uint32_t time_stamp,
                        int32_t decode_time_ms,
                        int64_t now_ms,
                        int64_t render_time_ms);
@@ -75,9 +70,10 @@ class VCMTiming {
   // Used to report that a frame is passed to decoding. Updates the timestamp
   // filter which is used to map between timestamps and receiver system time.
   void IncomingTimestamp(uint32_t time_stamp, int64_t last_packet_time_ms);
 
   // Returns the receiver system time when the frame with timestamp
-  // frame_timestamp should be rendered, assuming that the system time currently
-  // is now_ms.
+  // |frame_timestamp| should be rendered, assuming that the system time
+  // currently is |now_ms|.
   virtual int64_t RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const;
 
   // Returns the maximum time in ms that we can wait for a frame to become
@@ -88,10 +84,6 @@ class VCMTiming {
   // render delay.
   int TargetVideoDelay() const;
 
-  // Calculates whether or not there is enough time to decode a frame given a
-  // certain amount of processing time.
-  bool EnoughTimeToDecode(uint32_t available_processing_time_ms) const;
-
   // Return current timing information. Returns true if the first frame has been
   // decoded, false otherwise.
   virtual bool GetTimings(int* decode_ms,
modules/video_coding/timing_unittest.cc
@@ -8,138 +8,123 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
 #include <math.h>
 #include <stdio.h>
 #include <stdlib.h>
 
 #include "modules/video_coding/include/video_coding.h"
 #include "modules/video_coding/internal_defines.h"
 #include "modules/video_coding/timing.h"
 #include "system_wrappers/include/clock.h"
 #include "test/gtest.h"
 #include "test/testsupport/fileutils.h"
 
 namespace webrtc {
+namespace {
+const int kFps = 25;
+}  // namespace
 
 TEST(ReceiverTiming, Tests) {
   SimulatedClock clock(0);
   VCMTiming timing(&clock);
-  uint32_t waitTime = 0;
-  uint32_t jitterDelayMs = 0;
-  uint32_t requiredDecodeTimeMs = 0;
-  uint32_t timeStamp = 0;
   timing.Reset();
 
-  timing.UpdateCurrentDelay(timeStamp);
+  uint32_t timestamp = 0;
+  timing.UpdateCurrentDelay(timestamp);
 
   timing.Reset();
 
-  timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
-  jitterDelayMs = 20;
-  timing.SetJitterDelay(jitterDelayMs);
-  timing.UpdateCurrentDelay(timeStamp);
+  timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
+  uint32_t jitter_delay_ms = 20;
+  timing.SetJitterDelay(jitter_delay_ms);
+  timing.UpdateCurrentDelay(timestamp);
   timing.set_render_delay(0);
-  waitTime = timing.MaxWaitingTime(
-      timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+  uint32_t wait_time_ms = timing.MaxWaitingTime(
+      timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
       clock.TimeInMilliseconds());
   // First update initializes the render time. Since we have no decode delay
-  // we get waitTime = renderTime - now - renderDelay = jitter.
-  EXPECT_EQ(jitterDelayMs, waitTime);
+  // we get wait_time_ms = renderTime - now - renderDelay = jitter.
+  EXPECT_EQ(jitter_delay_ms, wait_time_ms);
 
-  jitterDelayMs += VCMTiming::kDelayMaxChangeMsPerS + 10;
-  timeStamp += 90000;
+  jitter_delay_ms += VCMTiming::kDelayMaxChangeMsPerS + 10;
+  timestamp += 90000;
   clock.AdvanceTimeMilliseconds(1000);
-  timing.SetJitterDelay(jitterDelayMs);
-  timing.UpdateCurrentDelay(timeStamp);
-  waitTime = timing.MaxWaitingTime(
-      timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+  timing.SetJitterDelay(jitter_delay_ms);
+  timing.UpdateCurrentDelay(timestamp);
+  wait_time_ms = timing.MaxWaitingTime(
+      timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
       clock.TimeInMilliseconds());
   // Since we gradually increase the delay we only get 100 ms every second.
-  EXPECT_EQ(jitterDelayMs - 10, waitTime);
+  EXPECT_EQ(jitter_delay_ms - 10, wait_time_ms);
 
-  timeStamp += 90000;
+  timestamp += 90000;
   clock.AdvanceTimeMilliseconds(1000);
-  timing.UpdateCurrentDelay(timeStamp);
-  waitTime = timing.MaxWaitingTime(
-      timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+  timing.UpdateCurrentDelay(timestamp);
+  wait_time_ms = timing.MaxWaitingTime(
+      timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
       clock.TimeInMilliseconds());
-  EXPECT_EQ(waitTime, jitterDelayMs);
+  EXPECT_EQ(jitter_delay_ms, wait_time_ms);
 
-  // 300 incoming frames without jitter, verify that this gives the exact wait
-  // time.
-  for (int i = 0; i < 300; i++) {
-    clock.AdvanceTimeMilliseconds(1000 / 25);
-    timeStamp += 90000 / 25;
-    timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
+  // Insert frames without jitter, verify that this gives the exact wait time.
+  const int kNumFrames = 300;
+  for (int i = 0; i < kNumFrames; i++) {
+    clock.AdvanceTimeMilliseconds(1000 / kFps);
+    timestamp += 90000 / kFps;
+    timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
   }
-  timing.UpdateCurrentDelay(timeStamp);
-  waitTime = timing.MaxWaitingTime(
-      timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+  timing.UpdateCurrentDelay(timestamp);
+  wait_time_ms = timing.MaxWaitingTime(
+      timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
       clock.TimeInMilliseconds());
-  EXPECT_EQ(waitTime, jitterDelayMs);
+  EXPECT_EQ(jitter_delay_ms, wait_time_ms);
 
-  // Add decode time estimates.
-  for (int i = 0; i < 10; i++) {
-    int64_t startTimeMs = clock.TimeInMilliseconds();
-    clock.AdvanceTimeMilliseconds(10);
+  // Add decode time estimates for 1 second.
+  const uint32_t kDecodeTimeMs = 10;
+  for (int i = 0; i < kFps; i++) {
+    clock.AdvanceTimeMilliseconds(kDecodeTimeMs);
     timing.StopDecodeTimer(
-        timeStamp, clock.TimeInMilliseconds() - startTimeMs,
-        clock.TimeInMilliseconds(),
-        timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()));
-    timeStamp += 90000 / 25;
-    clock.AdvanceTimeMilliseconds(1000 / 25 - 10);
-    timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
+        timestamp, kDecodeTimeMs, clock.TimeInMilliseconds(),
+        timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()));
+    timestamp += 90000 / kFps;
+    clock.AdvanceTimeMilliseconds(1000 / kFps - kDecodeTimeMs);
+    timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
   }
-  requiredDecodeTimeMs = 10;
-  timing.SetJitterDelay(jitterDelayMs);
+  timing.SetJitterDelay(jitter_delay_ms);
   clock.AdvanceTimeMilliseconds(1000);
-  timeStamp += 90000;
-  timing.UpdateCurrentDelay(timeStamp);
-  waitTime = timing.MaxWaitingTime(
-      timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+  timestamp += 90000;
+  timing.UpdateCurrentDelay(timestamp);
+  wait_time_ms = timing.MaxWaitingTime(
+      timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
       clock.TimeInMilliseconds());
-  EXPECT_EQ(waitTime, jitterDelayMs);
+  EXPECT_EQ(jitter_delay_ms, wait_time_ms);
 
-  int minTotalDelayMs = 200;
-  timing.set_min_playout_delay(minTotalDelayMs);
+  const int kMinTotalDelayMs = 200;
+  timing.set_min_playout_delay(kMinTotalDelayMs);
   clock.AdvanceTimeMilliseconds(5000);
-  timeStamp += 5 * 90000;
-  timing.UpdateCurrentDelay(timeStamp);
+  timestamp += 5 * 90000;
+  timing.UpdateCurrentDelay(timestamp);
   const int kRenderDelayMs = 10;
   timing.set_render_delay(kRenderDelayMs);
-  waitTime = timing.MaxWaitingTime(
-      timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+  wait_time_ms = timing.MaxWaitingTime(
+      timing.RenderTimeMs(timestamp, clock.TimeInMilliseconds()),
       clock.TimeInMilliseconds());
-  // We should at least have minTotalDelayMs - decodeTime (10) - renderTime
+  // We should at least have kMinTotalDelayMs - decodeTime (10) - renderTime
   // (10) to wait.
-  EXPECT_EQ(waitTime, minTotalDelayMs - requiredDecodeTimeMs - kRenderDelayMs);
+  EXPECT_EQ(kMinTotalDelayMs - kDecodeTimeMs - kRenderDelayMs, wait_time_ms);
   // The total video delay should be equal to the min total delay.
-  EXPECT_EQ(minTotalDelayMs, timing.TargetVideoDelay());
+  EXPECT_EQ(kMinTotalDelayMs, timing.TargetVideoDelay());
 
   // Reset playout delay.
   timing.set_min_playout_delay(0);
   clock.AdvanceTimeMilliseconds(5000);
-  timeStamp += 5 * 90000;
-  timing.UpdateCurrentDelay(timeStamp);
+  timestamp += 5 * 90000;
+  timing.UpdateCurrentDelay(timestamp);
 }
 
 TEST(ReceiverTiming, WrapAround) {
-  const int kFramerate = 25;
   SimulatedClock clock(0);
   VCMTiming timing(&clock);
-  // Provoke a wrap-around. The forth frame will have wrapped at 25 fps.
-  uint32_t timestamp = 0xFFFFFFFFu - 3 * 90000 / kFramerate;
-  for (int i = 0; i < 4; ++i) {
+  // Provoke a wrap-around. The fifth frame will have wrapped at 25 fps.
+  uint32_t timestamp = 0xFFFFFFFFu - 3 * 90000 / kFps;
+  for (int i = 0; i < 5; ++i) {
     timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
-    clock.AdvanceTimeMilliseconds(1000 / kFramerate);
-    timestamp += 90000 / kFramerate;
-    int64_t render_time =
-        timing.RenderTimeMs(0xFFFFFFFFu, clock.TimeInMilliseconds());
-    EXPECT_EQ(3 * 1000 / kFramerate, render_time);
-    render_time = timing.RenderTimeMs(89u,  // One second later in 90 kHz.
-                                      clock.TimeInMilliseconds());
-    EXPECT_EQ(3 * 1000 / kFramerate + 1, render_time);
+    clock.AdvanceTimeMilliseconds(1000 / kFps);
+    timestamp += 90000 / kFps;
+    EXPECT_EQ(3 * 1000 / kFps,
+              timing.RenderTimeMs(0xFFFFFFFFu, clock.TimeInMilliseconds()));
+    EXPECT_EQ(3 * 1000 / kFps + 1,
+              timing.RenderTimeMs(89u,  // One ms later in 90 kHz.
+                                  clock.TimeInMilliseconds()));
   }
 }
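The test's "we only get 100 ms every second" expectation follows from VCMTiming limiting how fast the current delay may move toward a new target: at most VCMTiming::kDelayMaxChangeMsPerS per second of stream time, measured via the 90 kHz RTP timestamp. A simplified sketch of that clamping (the constant's value of 100 is inferred from the test comment, and the real UpdateCurrentDelay handles more cases):

#include <algorithm>
#include <cstdint>
#include <cstdio>

int main() {
  const int kDelayMaxChangeMsPerS = 100;  // Assumed value, per the test.

  int current_delay_ms = 20;              // Delay has converged to 20 ms.
  const int target_delay_ms = 20 + kDelayMaxChangeMsPerS + 10;  // 130 ms.
  const uint32_t timestamp_diff = 90000;  // One second of 90 kHz stream time.

  // Allow at most kDelayMaxChangeMsPerS ms of change per second of stream.
  const int64_t max_change_ms = static_cast<int64_t>(
      kDelayMaxChangeMsPerS * (timestamp_diff / 90000.0));
  int64_t delay_diff_ms = target_delay_ms - current_delay_ms;  // 110 ms.
  delay_diff_ms = std::min(delay_diff_ms, max_change_ms);
  current_delay_ms += static_cast<int>(delay_diff_ms);

  // Prints 120 ms: still 10 ms short of the 130 ms target, matching
  // EXPECT_EQ(jitter_delay_ms - 10, wait_time_ms) in the test above.
  printf("current delay: %d ms\n", current_delay_ms);
  return 0;
}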