VCMCodecTimer: Change filter from max to 95th percentile

The purpose of this change is to make the filter more robust against anomalies. googMaxDecodeMs is expected to drop a little as a result.
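A minimal sketch of the 95th-percentile idea is shown below. The class name, window size, and method names are assumptions for illustration only, not the actual VCMCodecTimer API.

// Illustrative percentile filter over recent decode times. Window size and
// naming are assumptions; this is not the real WebRTC implementation.
#include <algorithm>
#include <cstdint>
#include <deque>
#include <vector>

class PercentileDecodeTimeFilter {
 public:
  void AddSample(int64_t decode_time_ms) {
    samples_.push_back(decode_time_ms);
    if (samples_.size() > kWindowSize)
      samples_.pop_front();
  }

  // Report the 95th percentile instead of the max, so a single slow outlier
  // frame no longer dominates the required decode time.
  int64_t RequiredDecodeTimeMs() const {
    if (samples_.empty())
      return 0;
    std::vector<int64_t> sorted(samples_.begin(), samples_.end());
    std::sort(sorted.begin(), sorted.end());
    const size_t index = static_cast<size_t>(0.95 * (sorted.size() - 1));
    return sorted[index];
  }

 private:
  static constexpr size_t kWindowSize = 100;  // Assumed window length.
  std::deque<int64_t> samples_;
};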

BUG=b/27306053

Review URL: https://codereview.webrtc.org/1742323002

Cr-Commit-Position: refs/heads/master@{#11952}
Author: magjed
Date: 2016-03-11 02:15:07 -08:00
Committed by: Commit bot
Parent: 43166b8adf
Commit: 4bf0c71774
11 changed files with 306 additions and 119 deletions

@@ -29,7 +29,7 @@ TEST(ReceiverTiming, Tests) {
   VCMTiming timing(&clock);
   uint32_t waitTime = 0;
   uint32_t jitterDelayMs = 0;
-  uint32_t maxDecodeTimeMs = 0;
+  uint32_t requiredDecodeTimeMs = 0;
   uint32_t timeStamp = 0;
   timing.Reset();
@@ -94,7 +94,7 @@ TEST(ReceiverTiming, Tests) {
     clock.AdvanceTimeMilliseconds(1000 / 25 - 10);
     timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
   }
-  maxDecodeTimeMs = 10;
+  requiredDecodeTimeMs = 10;
   timing.SetJitterDelay(jitterDelayMs);
   clock.AdvanceTimeMilliseconds(1000);
   timeStamp += 90000;
@@ -116,7 +116,7 @@ TEST(ReceiverTiming, Tests) {
                       clock.TimeInMilliseconds());
   // We should at least have minTotalDelayMs - decodeTime (10) - renderTime
   // (10) to wait.
-  EXPECT_EQ(waitTime, minTotalDelayMs - maxDecodeTimeMs - kRenderDelayMs);
+  EXPECT_EQ(waitTime, minTotalDelayMs - requiredDecodeTimeMs - kRenderDelayMs);
   // The total video delay should be equal to the min total delay.
   EXPECT_EQ(minTotalDelayMs, timing.TargetVideoDelay());
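As a rough worked example of the expectation in this hunk (minTotalDelayMs is set elsewhere in the test and not visible here, so 200 ms is an assumed value): with requiredDecodeTimeMs = 10 and kRenderDelayMs = 10, the expected waitTime would be 200 - 10 - 10 = 180 ms, while TargetVideoDelay() still reports the full 200 ms.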