StreamSynchronizationTest: Replace class Time with SimulatedClock.

Remove unused constants and variables.

Bug: none
Change-Id: I7336bbe5bfecbaaf646c9704e4f75532629754d1
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/159944
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#29826}
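
As a quick illustration of the substitution (a minimal sketch, not part of the patch; the function name Sketch and the constant kAudioFrequencyHz are made up for this example), the hand-rolled Time helper that converted a millisecond counter into NTP and RTP timestamps is replaced by webrtc::SimulatedClock, which already exposes CurrentNtpTime(), CurrentTime() and AdvanceTimeMilliseconds():

#include <cstdint>

#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/ntp_time.h"

// Illustrative sketch only: how the test now derives RTCP-style NTP/RTP
// timestamp pairs directly from SimulatedClock instead of the removed
// Time class.
void Sketch() {
  // The initial time is given in microseconds (98765000 us == 98765 ms,
  // matching the old kSendTimeOffsetMs).
  webrtc::SimulatedClock clock_sender(98765000);

  // Old: Time::GetNowNtp() assembled NTP seconds/fractions by hand from a
  // millisecond counter. New: the clock returns an NtpTime directly.
  webrtc::NtpTime ntp_time = clock_sender.CurrentNtpTime();

  // Old: Time::GetNowRtp(frequency, offset), where the per-stream offset was
  // always 0 and is dropped. New: scale the clock's time by the RTP frequency.
  constexpr int kAudioFrequencyHz = 8000;
  uint32_t rtp_timestamp = static_cast<uint32_t>(
      clock_sender.CurrentTime().ms() * kAudioFrequencyHz / 1000);

  // Simulated time only advances when told to.
  clock_sender.AdvanceTimeMilliseconds(100);

  (void)ntp_time;
  (void)rtp_timestamp;
}

In the fixture itself, the two SimulatedClock members (clock_sender_ and clock_receiver_) take over the roles of the deleted send_time_ and receive_time_ pointers, so the SetUp/TearDown overrides and the manual new/delete disappear, as the diff below shows.
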
@@ -12,65 +12,24 @@
 
 #include <algorithm>
 
+#include "system_wrappers/include/clock.h"
 #include "system_wrappers/include/ntp_time.h"
 #include "test/gtest.h"
 
 namespace webrtc {
-
-// These correspond to the same constants defined in vie_sync_module.cc.
-enum { kMaxVideoDiffMs = 80 };
-enum { kMaxAudioDiffMs = 80 };
-enum { kMaxDelay = 1500 };
-
-// Test constants.
-enum { kDefaultAudioFrequency = 8000 };
-enum { kDefaultVideoFrequency = 90000 };
-const double kNtpFracPerMs = 4.294967296E6;
-static const int kSmoothingFilter = 4 * 2;
-
-class Time {
- public:
-  explicit Time(int64_t offset)
-      : kNtpJan1970(2208988800UL), time_now_ms_(offset) {}
-
-  NtpTime GetNowNtp() const {
-    uint32_t ntp_secs = time_now_ms_ / 1000 + kNtpJan1970;
-    int64_t remainder_ms = time_now_ms_ % 1000;
-    uint32_t ntp_frac = static_cast<uint32_t>(
-        static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5);
-    return NtpTime(ntp_secs, ntp_frac);
-  }
-
-  uint32_t GetNowRtp(int frequency, uint32_t offset) const {
-    return frequency * time_now_ms_ / 1000 + offset;
-  }
-
-  void IncreaseTimeMs(int64_t inc) { time_now_ms_ += inc; }
-
-  int64_t time_now_ms() const { return time_now_ms_; }
-
- private:
-  // January 1970, in NTP seconds.
-  const uint32_t kNtpJan1970;
-  int64_t time_now_ms_;
-};
+namespace {
+constexpr int kMaxAudioDiffMs = 80;  // From stream_synchronization.cc
+constexpr int kDefaultAudioFrequency = 8000;
+constexpr int kDefaultVideoFrequency = 90000;
+constexpr int kSmoothingFilter = 4 * 2;
+}  // namespace
 
 class StreamSynchronizationTest : public ::testing::Test {
+ public:
+  StreamSynchronizationTest()
+      : sync_(0, 0), clock_sender_(98765000), clock_receiver_(43210000) {}
+
  protected:
-  virtual void SetUp() {
-    sync_ = new StreamSynchronization(0, 0);
-    send_time_ = new Time(kSendTimeOffsetMs);
-    receive_time_ = new Time(kReceiveTimeOffsetMs);
-    audio_clock_drift_ = 1.0;
-    video_clock_drift_ = 1.0;
-  }
-
-  virtual void TearDown() {
-    delete sync_;
-    delete send_time_;
-    delete receive_time_;
-  }
-
   // Generates the necessary RTCP measurements and RTP timestamps and computes
   // the audio and video delays needed to get the two streams in sync.
   // |audio_delay_ms| and |video_delay_ms| are the number of milliseconds after
@@ -84,66 +43,64 @@ class StreamSynchronizationTest : public ::testing::Test {
                       int* total_video_delay_ms) {
     int audio_frequency =
         static_cast<int>(kDefaultAudioFrequency * audio_clock_drift_ + 0.5);
-    int audio_offset = 0;
     int video_frequency =
         static_cast<int>(kDefaultVideoFrequency * video_clock_drift_ + 0.5);
 
-    // Generate NTP/RTP timestamp pair for both streams corresponding to RTCP.
     bool new_sr;
-    int video_offset = 0;
     StreamSynchronization::Measurements audio;
     StreamSynchronization::Measurements video;
-    NtpTime ntp_time = send_time_->GetNowNtp();
+    // Generate NTP/RTP timestamp pair for both streams corresponding to RTCP.
+    NtpTime ntp_time = clock_sender_.CurrentNtpTime();
     uint32_t rtp_timestamp =
-        send_time_->GetNowRtp(audio_frequency, audio_offset);
+        clock_sender_.CurrentTime().ms() * audio_frequency / 1000;
     EXPECT_TRUE(audio.rtp_to_ntp.UpdateMeasurements(
         ntp_time.seconds(), ntp_time.fractions(), rtp_timestamp, &new_sr));
-    send_time_->IncreaseTimeMs(100);
-    receive_time_->IncreaseTimeMs(100);
-    ntp_time = send_time_->GetNowNtp();
-    rtp_timestamp = send_time_->GetNowRtp(video_frequency, video_offset);
+    clock_sender_.AdvanceTimeMilliseconds(100);
+    clock_receiver_.AdvanceTimeMilliseconds(100);
+    ntp_time = clock_sender_.CurrentNtpTime();
+    rtp_timestamp = clock_sender_.CurrentTime().ms() * video_frequency / 1000;
     EXPECT_TRUE(video.rtp_to_ntp.UpdateMeasurements(
         ntp_time.seconds(), ntp_time.fractions(), rtp_timestamp, &new_sr));
-    send_time_->IncreaseTimeMs(900);
-    receive_time_->IncreaseTimeMs(900);
-    ntp_time = send_time_->GetNowNtp();
-    rtp_timestamp = send_time_->GetNowRtp(audio_frequency, audio_offset);
+    clock_sender_.AdvanceTimeMilliseconds(900);
+    clock_receiver_.AdvanceTimeMilliseconds(900);
+    ntp_time = clock_sender_.CurrentNtpTime();
+    rtp_timestamp = clock_sender_.CurrentTime().ms() * audio_frequency / 1000;
    EXPECT_TRUE(audio.rtp_to_ntp.UpdateMeasurements(
         ntp_time.seconds(), ntp_time.fractions(), rtp_timestamp, &new_sr));
-    send_time_->IncreaseTimeMs(100);
-    receive_time_->IncreaseTimeMs(100);
-    ntp_time = send_time_->GetNowNtp();
-    rtp_timestamp = send_time_->GetNowRtp(video_frequency, video_offset);
+    clock_sender_.AdvanceTimeMilliseconds(100);
+    clock_receiver_.AdvanceTimeMilliseconds(100);
+    ntp_time = clock_sender_.CurrentNtpTime();
+    rtp_timestamp = clock_sender_.CurrentTime().ms() * video_frequency / 1000;
     EXPECT_TRUE(video.rtp_to_ntp.UpdateMeasurements(
         ntp_time.seconds(), ntp_time.fractions(), rtp_timestamp, &new_sr));
 
-    send_time_->IncreaseTimeMs(900);
-    receive_time_->IncreaseTimeMs(900);
+    clock_sender_.AdvanceTimeMilliseconds(900);
+    clock_receiver_.AdvanceTimeMilliseconds(900);
 
     // Capture an audio and a video frame at the same time.
     audio.latest_timestamp =
-        send_time_->GetNowRtp(audio_frequency, audio_offset);
+        clock_sender_.CurrentTime().ms() * audio_frequency / 1000;
     video.latest_timestamp =
-        send_time_->GetNowRtp(video_frequency, video_offset);
+        clock_sender_.CurrentTime().ms() * video_frequency / 1000;
 
     if (audio_delay_ms > video_delay_ms) {
       // Audio later than video.
-      receive_time_->IncreaseTimeMs(video_delay_ms);
-      video.latest_receive_time_ms = receive_time_->time_now_ms();
-      receive_time_->IncreaseTimeMs(audio_delay_ms - video_delay_ms);
-      audio.latest_receive_time_ms = receive_time_->time_now_ms();
+      clock_receiver_.AdvanceTimeMilliseconds(video_delay_ms);
+      video.latest_receive_time_ms = clock_receiver_.CurrentTime().ms();
+      clock_receiver_.AdvanceTimeMilliseconds(audio_delay_ms - video_delay_ms);
+      audio.latest_receive_time_ms = clock_receiver_.CurrentTime().ms();
     } else {
       // Video later than audio.
-      receive_time_->IncreaseTimeMs(audio_delay_ms);
-      audio.latest_receive_time_ms = receive_time_->time_now_ms();
-      receive_time_->IncreaseTimeMs(video_delay_ms - audio_delay_ms);
-      video.latest_receive_time_ms = receive_time_->time_now_ms();
+      clock_receiver_.AdvanceTimeMilliseconds(audio_delay_ms);
+      audio.latest_receive_time_ms = clock_receiver_.CurrentTime().ms();
+      clock_receiver_.AdvanceTimeMilliseconds(video_delay_ms - audio_delay_ms);
+      video.latest_receive_time_ms = clock_receiver_.CurrentTime().ms();
     }
     int relative_delay_ms;
     StreamSynchronization::ComputeRelativeDelay(audio, video,
                                                 &relative_delay_ms);
     EXPECT_EQ(video_delay_ms - audio_delay_ms, relative_delay_ms);
-    return sync_->ComputeDelays(relative_delay_ms, current_audio_delay_ms,
-                                extra_audio_delay_ms, total_video_delay_ms);
+    return sync_.ComputeDelays(relative_delay_ms, current_audio_delay_ms,
+                               extra_audio_delay_ms, total_video_delay_ms);
   }
 
   // Simulate audio playback 300 ms after capture and video rendering 100 ms
@@ -170,9 +127,9 @@ class StreamSynchronizationTest : public ::testing::Test {
     EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
     current_audio_delay_ms = extra_audio_delay_ms;
 
-    send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(1000 -
-                                  std::max(audio_delay_ms, video_delay_ms));
+    clock_sender_.AdvanceTimeMilliseconds(1000);
+    clock_receiver_.AdvanceTimeMilliseconds(
+        1000 - std::max(audio_delay_ms, video_delay_ms));
     // Simulate base_target_delay minimum delay in the VCM.
     total_video_delay_ms = base_target_delay;
     EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
@@ -182,9 +139,9 @@ class StreamSynchronizationTest : public ::testing::Test {
     EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
     current_audio_delay_ms = extra_audio_delay_ms;
 
-    send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(1000 -
-                                  std::max(audio_delay_ms, video_delay_ms));
+    clock_sender_.AdvanceTimeMilliseconds(1000);
+    clock_receiver_.AdvanceTimeMilliseconds(
+        1000 - std::max(audio_delay_ms, video_delay_ms));
     // Simulate base_target_delay minimum delay in the VCM.
     total_video_delay_ms = base_target_delay;
     EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
@@ -195,9 +152,9 @@ class StreamSynchronizationTest : public ::testing::Test {
 
     // Simulate that NetEQ introduces some audio delay.
     current_audio_delay_ms = base_target_delay + kNeteqDelayIncrease;
-    send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(1000 -
-                                  std::max(audio_delay_ms, video_delay_ms));
+    clock_sender_.AdvanceTimeMilliseconds(1000);
+    clock_receiver_.AdvanceTimeMilliseconds(
+        1000 - std::max(audio_delay_ms, video_delay_ms));
     // Simulate base_target_delay minimum delay in the VCM.
     total_video_delay_ms = base_target_delay;
     EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
@@ -211,9 +168,9 @@ class StreamSynchronizationTest : public ::testing::Test {
 
     // Simulate that NetEQ reduces its delay.
     current_audio_delay_ms = base_target_delay + kNeteqDelayDecrease;
-    send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(1000 -
-                                  std::max(audio_delay_ms, video_delay_ms));
+    clock_sender_.AdvanceTimeMilliseconds(1000);
+    clock_receiver_.AdvanceTimeMilliseconds(
+        1000 - std::max(audio_delay_ms, video_delay_ms));
     // Simulate base_target_delay minimum delay in the VCM.
     total_video_delay_ms = base_target_delay;
     EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
@@ -244,8 +201,8 @@ class StreamSynchronizationTest : public ::testing::Test {
     current_audio_delay_ms = extra_audio_delay_ms;
     int current_extra_delay_ms = extra_audio_delay_ms;
 
-    send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(800);
+    clock_sender_.AdvanceTimeMilliseconds(1000);
+    clock_receiver_.AdvanceTimeMilliseconds(800);
     EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
                                current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
@@ -260,8 +217,8 @@ class StreamSynchronizationTest : public ::testing::Test {
     current_audio_delay_ms = extra_audio_delay_ms;
     current_extra_delay_ms = extra_audio_delay_ms;
 
-    send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(800);
+    clock_sender_.AdvanceTimeMilliseconds(1000);
+    clock_receiver_.AdvanceTimeMilliseconds(800);
     EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
                                current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
@@ -277,8 +234,8 @@ class StreamSynchronizationTest : public ::testing::Test {
 
     // Simulate that NetEQ for some reason reduced the delay.
     current_audio_delay_ms = base_target_delay + 10;
-    send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(800);
+    clock_sender_.AdvanceTimeMilliseconds(1000);
+    clock_receiver_.AdvanceTimeMilliseconds(800);
     EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
                                current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
@@ -295,8 +252,8 @@ class StreamSynchronizationTest : public ::testing::Test {
 
     // Simulate that NetEQ for some reason significantly increased the delay.
     current_audio_delay_ms = base_target_delay + 350;
-    send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(800);
+    clock_sender_.AdvanceTimeMilliseconds(1000);
+    clock_receiver_.AdvanceTimeMilliseconds(800);
     EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
                                current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
@@ -312,7 +269,7 @@ class StreamSynchronizationTest : public ::testing::Test {
 
   int MaxAudioDelayIncrease(int current_audio_delay_ms, int delay_ms) {
     return std::min((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
-                    static_cast<int>(kMaxAudioDiffMs));
+                    kMaxAudioDiffMs);
   }
 
   int MaxAudioDelayDecrease(int current_audio_delay_ms, int delay_ms) {
@@ -320,14 +277,11 @@ class StreamSynchronizationTest : public ::testing::Test {
                     -kMaxAudioDiffMs);
   }
 
-  enum { kSendTimeOffsetMs = 98765 };
-  enum { kReceiveTimeOffsetMs = 43210 };
-
-  StreamSynchronization* sync_;
-  Time* send_time_;     // The simulated clock at the sender.
-  Time* receive_time_;  // The simulated clock at the receiver.
-  double audio_clock_drift_;
-  double video_clock_drift_;
+  StreamSynchronization sync_;
+  SimulatedClock clock_sender_;
+  SimulatedClock clock_receiver_;
+  double audio_clock_drift_ = 1.0;
+  double video_clock_drift_ = 1.0;
 };
 
 TEST_F(StreamSynchronizationTest, NoDelay) {
@@ -353,8 +307,8 @@ TEST_F(StreamSynchronizationTest, VideoDelay) {
   // The video delay is not allowed to change more than this in 1 second.
   EXPECT_EQ(delay_ms / kSmoothingFilter, total_video_delay_ms);
 
-  send_time_->IncreaseTimeMs(1000);
-  receive_time_->IncreaseTimeMs(800);
+  clock_sender_.AdvanceTimeMilliseconds(1000);
+  clock_receiver_.AdvanceTimeMilliseconds(800);
   // Simulate 0 minimum delay in the VCM.
   total_video_delay_ms = 0;
   EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
@@ -363,8 +317,8 @@ TEST_F(StreamSynchronizationTest, VideoDelay) {
   // The video delay is not allowed to change more than this in 1 second.
   EXPECT_EQ(2 * delay_ms / kSmoothingFilter, total_video_delay_ms);
 
-  send_time_->IncreaseTimeMs(1000);
-  receive_time_->IncreaseTimeMs(800);
+  clock_sender_.AdvanceTimeMilliseconds(1000);
+  clock_receiver_.AdvanceTimeMilliseconds(800);
   // Simulate 0 minimum delay in the VCM.
   total_video_delay_ms = 0;
   EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
@@ -387,8 +341,8 @@ TEST_F(StreamSynchronizationTest, AudioDelay) {
   current_audio_delay_ms = extra_audio_delay_ms;
   int current_extra_delay_ms = extra_audio_delay_ms;
 
-  send_time_->IncreaseTimeMs(1000);
-  receive_time_->IncreaseTimeMs(800);
+  clock_sender_.AdvanceTimeMilliseconds(1000);
+  clock_receiver_.AdvanceTimeMilliseconds(800);
   EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
                              &extra_audio_delay_ms, &total_video_delay_ms));
   EXPECT_EQ(0, total_video_delay_ms);
@@ -400,8 +354,8 @@ TEST_F(StreamSynchronizationTest, AudioDelay) {
   current_audio_delay_ms = extra_audio_delay_ms;
   current_extra_delay_ms = extra_audio_delay_ms;
 
-  send_time_->IncreaseTimeMs(1000);
-  receive_time_->IncreaseTimeMs(800);
+  clock_sender_.AdvanceTimeMilliseconds(1000);
+  clock_receiver_.AdvanceTimeMilliseconds(800);
   EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
                              &extra_audio_delay_ms, &total_video_delay_ms));
   EXPECT_EQ(0, total_video_delay_ms);
@@ -414,8 +368,8 @@ TEST_F(StreamSynchronizationTest, AudioDelay) {
 
   // Simulate that NetEQ for some reason reduced the delay.
   current_audio_delay_ms = 10;
-  send_time_->IncreaseTimeMs(1000);
-  receive_time_->IncreaseTimeMs(800);
+  clock_sender_.AdvanceTimeMilliseconds(1000);
+  clock_receiver_.AdvanceTimeMilliseconds(800);
   EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
                              &extra_audio_delay_ms, &total_video_delay_ms));
   EXPECT_EQ(0, total_video_delay_ms);
@@ -429,8 +383,8 @@ TEST_F(StreamSynchronizationTest, AudioDelay) {
 
   // Simulate that NetEQ for some reason significantly increased the delay.
   current_audio_delay_ms = 350;
-  send_time_->IncreaseTimeMs(1000);
-  receive_time_->IncreaseTimeMs(800);
+  clock_sender_.AdvanceTimeMilliseconds(1000);
+  clock_receiver_.AdvanceTimeMilliseconds(800);
   EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
                              &extra_audio_delay_ms, &total_video_delay_ms));
   EXPECT_EQ(0, total_video_delay_ms);
@@ -474,7 +428,7 @@ TEST_F(StreamSynchronizationTest, BaseDelay) {
   int current_audio_delay_ms = 2000;
   int extra_audio_delay_ms = 0;
   int total_video_delay_ms = base_target_delay_ms;
-  sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  sync_.SetTargetBufferingDelay(base_target_delay_ms);
   // We are in sync don't change.
   EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
                               current_audio_delay_ms, &extra_audio_delay_ms,
@@ -483,7 +437,7 @@ TEST_F(StreamSynchronizationTest, BaseDelay) {
   base_target_delay_ms = 2000;
   current_audio_delay_ms = base_target_delay_ms;
   total_video_delay_ms = base_target_delay_ms;
-  sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  sync_.SetTargetBufferingDelay(base_target_delay_ms);
   // We are in sync don't change.
   EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
                               current_audio_delay_ms, &extra_audio_delay_ms,
@@ -493,7 +447,7 @@ TEST_F(StreamSynchronizationTest, BaseDelay) {
   base_target_delay_ms = 5000;
   current_audio_delay_ms = base_target_delay_ms;
   total_video_delay_ms = base_target_delay_ms;
-  sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  sync_.SetTargetBufferingDelay(base_target_delay_ms);
   // We are in sync don't change.
   EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
                               current_audio_delay_ms, &extra_audio_delay_ms,
@@ -502,27 +456,27 @@ TEST_F(StreamSynchronizationTest, BaseDelay) {
 
 TEST_F(StreamSynchronizationTest, BothDelayedAudioLaterWithBaseDelay) {
   int base_target_delay_ms = 3000;
-  sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  sync_.SetTargetBufferingDelay(base_target_delay_ms);
   BothDelayedAudioLaterTest(base_target_delay_ms);
 }
 
 TEST_F(StreamSynchronizationTest, BothDelayedAudioClockDriftWithBaseDelay) {
   int base_target_delay_ms = 3000;
-  sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  sync_.SetTargetBufferingDelay(base_target_delay_ms);
   audio_clock_drift_ = 1.05;
   BothDelayedAudioLaterTest(base_target_delay_ms);
 }
 
 TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDriftWithBaseDelay) {
   int base_target_delay_ms = 3000;
-  sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  sync_.SetTargetBufferingDelay(base_target_delay_ms);
   video_clock_drift_ = 1.05;
   BothDelayedAudioLaterTest(base_target_delay_ms);
 }
 
 TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterWithBaseDelay) {
   int base_target_delay_ms = 2000;
-  sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  sync_.SetTargetBufferingDelay(base_target_delay_ms);
   BothDelayedVideoLaterTest(base_target_delay_ms);
 }
 
@@ -530,7 +484,7 @@ TEST_F(StreamSynchronizationTest,
        BothDelayedVideoLaterAudioClockDriftWithBaseDelay) {
   int base_target_delay_ms = 2000;
   audio_clock_drift_ = 1.05;
-  sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  sync_.SetTargetBufferingDelay(base_target_delay_ms);
   BothDelayedVideoLaterTest(base_target_delay_ms);
 }
 
@@ -538,7 +492,7 @@ TEST_F(StreamSynchronizationTest,
        BothDelayedVideoLaterVideoClockDriftWithBaseDelay) {
   int base_target_delay_ms = 2000;
   video_clock_drift_ = 1.05;
-  sync_->SetTargetBufferingDelay(base_target_delay_ms);
+  sync_.SetTargetBufferingDelay(base_target_delay_ms);
   BothDelayedVideoLaterTest(base_target_delay_ms);
 }