Added configuration of max delay to ACM and NetEq

R=turaj@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1964004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4499 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
pwestin@webrtc.org
2013-08-06 21:01:36 +00:00
parent c883fdc273
commit 401ef361ac
11 changed files with 123 additions and 24 deletions

View File

@ -660,6 +660,19 @@ class AudioCodingModule: public Module {
//
virtual int SetMinimumPlayoutDelay(int time_ms) = 0;
///////////////////////////////////////////////////////////////////////////
// int SetMaximumPlayoutDelay()
// Set a maximum for the playout delay
//
// Input:
// -time_ms : maximum delay in milliseconds.
//
// Return value:
// -1 if failed to set the delay,
// 0 if the maximum delay is set.
//
virtual int SetMaximumPlayoutDelay(int time_ms) = 0;
//
// The shortest latency, in milliseconds, required by jitter buffer. This
// is computed based on inter-arrival times and playout mode of NetEq. The

View File

@ -49,7 +49,8 @@ ACMNetEQ::ACMNetEQ()
min_of_buffer_size_bytes_(0),
per_packet_overhead_bytes_(0),
av_sync_(false),
minimum_delay_ms_(0) {
minimum_delay_ms_(0),
maximum_delay_ms_(0) {
for (int n = 0; n < MAX_NUM_SLAVE_NETEQ + 1; n++) {
is_initialized_[n] = false;
ptr_vadinst_[n] = NULL;
@ -1074,6 +1075,10 @@ int16_t ACMNetEQ::AddSlave(const WebRtcNetEQDecoder* used_codecs,
// Set minimum delay.
if (minimum_delay_ms_ > 0)
WebRtcNetEQ_SetMinimumDelay(inst_[slave_idx], minimum_delay_ms_);
// Set maximum delay.
if (maximum_delay_ms_ > 0)
WebRtcNetEQ_SetMaximumDelay(inst_[slave_idx], maximum_delay_ms_);
}
return 0;
@ -1109,6 +1114,17 @@ int ACMNetEQ::SetMinimumDelay(int minimum_delay_ms) {
return 0;
}
int ACMNetEQ::SetMaximumDelay(int maximum_delay_ms) {
  CriticalSectionScoped lock(neteq_crit_sect_);
  // Push the cap to the master NetEq instance and every slave. All of them
  // must accept it before the new value is recorded; on the first failure we
  // bail out without updating |maximum_delay_ms_|.
  const int num_instances = num_slaves_ + 1;
  for (int idx = 0; idx < num_instances; ++idx) {
    assert(is_initialized_[idx]);
    if (WebRtcNetEQ_SetMaximumDelay(inst_[idx], maximum_delay_ms) < 0)
      return -1;
  }
  maximum_delay_ms_ = maximum_delay_ms;
  return 0;
}
int ACMNetEQ::LeastRequiredDelayMs() const {
CriticalSectionScoped lock(neteq_crit_sect_);
assert(is_initialized_[0]);

View File

@ -300,6 +300,11 @@ class ACMNetEQ {
//
int SetMinimumDelay(int minimum_delay_ms);
//
// Set a maximum delay in NetEq.
//
int SetMaximumDelay(int maximum_delay_ms);
//
// The shortest latency, in milliseconds, required by jitter buffer. This
// is computed based on inter-arrival times and playout mode of NetEq. The
@ -384,6 +389,7 @@ class ACMNetEQ {
bool av_sync_;
int minimum_delay_ms_;
int maximum_delay_ms_;
};
} // namespace webrtc

View File

@ -2205,13 +2205,7 @@ int AudioCodingModuleImpl::InitStereoSlave() {
return 0;
}
// Minimum playout delay (Used for lip-sync).
int AudioCodingModuleImpl::SetMinimumPlayoutDelay(int time_ms) {
if ((time_ms < 0) || (time_ms > 10000)) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"Delay must be in the range of 0-10000 milliseconds.");
return -1;
}
{
CriticalSectionScoped lock(acm_crit_sect_);
// Don't let the extra delay modified while accumulating buffers in NetEq.
@ -2221,6 +2215,10 @@ int AudioCodingModuleImpl::SetMinimumPlayoutDelay(int time_ms) {
return neteq_.SetMinimumDelay(time_ms);
}
// Maximum playout delay (an upper bound for NetEq's target delay).
//
// Input:
//   -time_ms : maximum delay in milliseconds.
//
// Return value: 0 on success, -1 if NetEq rejected the delay.
//
// NOTE(review): unlike SetMinimumPlayoutDelay, no range check is performed
// at the ACM level here -- confirm NetEq rejects out-of-range values.
int AudioCodingModuleImpl::SetMaximumPlayoutDelay(int time_ms) {
return neteq_.SetMaximumDelay(time_ms);
}
// Get Dtmf playout status.
bool AudioCodingModuleImpl::DtmfPlayoutStatus() const {
#ifndef WEBRTC_CODEC_AVT

View File

@ -175,7 +175,10 @@ class AudioCodingModuleImpl : public AudioCodingModule {
// is the max of |time_ms| and the required delay dictated by the channel.
int SetMinimumPlayoutDelay(int time_ms);
//
// NetEq maximum playout delay. The actual target delay is the min of
// |time_ms| and the required delay dictated by the channel.
int SetMaximumPlayoutDelay(int time_ms);
// The shortest latency, in milliseconds, required by jitter buffer. This
// is computed based on inter-arrival times and playout mode of NetEq. The
// actual delay is the maximum of least-required-delay and the minimum-delay

View File

@ -94,6 +94,10 @@ class TargetDelayTest : public ::testing::Test {
return acm_->SetMinimumPlayoutDelay(delay_ms);
}
// Test helper: forwards |delay_ms| to ACM's SetMaximumPlayoutDelay and
// returns its status (0 on success, -1 on failure).
int SetMaximumDelay(int delay_ms) {
return acm_->SetMaximumPlayoutDelay(delay_ms);
}
int GetCurrentOptimalDelayMs() {
ACMNetworkStatistics stats;
acm_->NetworkStatistics(&stats);
@ -170,4 +174,21 @@ TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(RequiredDelayAtCorrectRange)) {
required_delay, 1);
}
TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(TargetDelayBufferMinMax)) {
  // Enough iterations of Run() to fill up the jitter buffer.
  const int kNumIterations = 30;

  // Pin the target delay from below and verify NetEq honors the floor.
  const int kTargetMinDelayMs = kNum10msPerFrame * 10;
  ASSERT_EQ(0, SetMinimumDelay(kTargetMinDelayMs));
  for (int iter = 0; iter < kNumIterations; ++iter)
    Run(true);
  EXPECT_EQ(kTargetMinDelayMs, GetCurrentOptimalDelayMs());

  // Now cap the delay from above and verify the optimal delay is clamped.
  const int kTargetMaxDelayMs = 2 * (kNum10msPerFrame * 10);
  ASSERT_EQ(0, SetMaximumDelay(kTargetMaxDelayMs));
  for (int iter = 0; iter < kNumIterations; ++iter)
    Run(false);
  EXPECT_EQ(kTargetMaxDelayMs, GetCurrentOptimalDelayMs());
}
} // webrtc