API to control target delay in NetEq jitter buffer. NetEq maintains the given delay unless channel conditions require a higher delay.

TEST=unit-test, manual, trybots.
R=henrik.lundin@webrtc.org, henrika@webrtc.org, mflodman@webrtc.org, mikhal@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1384005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4087 4adac7df-926f-26a2-2b94-8c16560cd09d
turaj@webrtc.org committed 2013-05-22 20:39:43 +00:00
parent 561990fd73
commit e46c8d3875
19 changed files with 405 additions and 74 deletions
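Before the diff, a minimal usage sketch of the API this change introduces at the ACM level. It is illustrative only: the include path and the presence of LeastRequiredDelayMs() in the public AudioCodingModule interface are assumed from this era of the tree and are not shown in the excerpt below.

// Illustrative usage sketch; not code from this commit. The header path and
// the public declaration of LeastRequiredDelayMs() are assumptions.
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"

void ConfigureReceiverDelay(webrtc::AudioCodingModule* acm,
                            int lip_sync_delay_ms) {
  // Ask NetEq to keep at least |lip_sync_delay_ms| of buffering; NetEq may
  // still target a larger delay if channel conditions require it.
  acm->SetMinimumPlayoutDelay(lip_sync_delay_ms);

  // The delay NetEq actually needs, given observed packet inter-arrival
  // times; the effective target is the max of this and the configured minimum.
  const int least_required_ms = acm->LeastRequiredDelayMs();
  (void)least_required_ms;  // E.g. report this to an A/V sync controller.
}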


@ -44,12 +44,12 @@ ACMNetEQ::ACMNetEQ()
received_stereo_(false),
master_slave_info_(NULL),
previous_audio_activity_(AudioFrame::kVadUnknown),
extra_delay_(0),
callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
min_of_max_num_packets_(0),
min_of_buffer_size_bytes_(0),
per_packet_overhead_bytes_(0),
av_sync_(false) {
av_sync_(false),
minimum_delay_ms_(0) {
for (int n = 0; n < MAX_NUM_SLAVE_NETEQ + 1; n++) {
is_initialized_[n] = false;
ptr_vadinst_[n] = NULL;
@ -270,24 +270,6 @@ int16_t ACMNetEQ::AllocatePacketBufferByIdxSafe(
return 0;
}
int32_t ACMNetEQ::SetExtraDelay(const int32_t delay_in_ms) {
CriticalSectionScoped lock(neteq_crit_sect_);
for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
if (!is_initialized_[idx]) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"SetExtraDelay: NetEq is not initialized.");
return -1;
}
if (WebRtcNetEQ_SetExtraDelay(inst_[idx], delay_in_ms) < 0) {
LogError("SetExtraDelay", idx);
return -1;
}
}
extra_delay_ = delay_in_ms;
return 0;
}
int32_t ACMNetEQ::SetAVTPlayout(const bool enable) {
CriticalSectionScoped lock(neteq_crit_sect_);
if (avt_playout_ != enable) {
@ -1037,14 +1019,6 @@ int16_t ACMNetEQ::AddSlave(const WebRtcNetEQDecoder* used_codecs,
num_slaves_ = 1;
is_initialized_[slave_idx] = true;
// Set Slave delay as all other instances.
if (WebRtcNetEQ_SetExtraDelay(inst_[slave_idx], extra_delay_) < 0) {
LogError("SetExtraDelay", slave_idx);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"AddSlave: AddSlave Failed, Could not set delay");
return -1;
}
// Set AVT
if (WebRtcNetEQ_SetAVTPlayout(inst_[slave_idx],
(avt_playout_) ? 1 : 0) < 0) {
@ -1093,8 +1067,13 @@ int16_t ACMNetEQ::AddSlave(const WebRtcNetEQDecoder* used_codecs,
"AddSlave: AddSlave Failed, Could not Set Playout Mode.");
return -1;
}
// Set AV-sync for the slave.
WebRtcNetEQ_EnableAVSync(inst_[slave_idx], av_sync_ ? 1 : 0);
// Set minimum delay.
if (minimum_delay_ms_ > 0)
WebRtcNetEQ_SetMinimumDelay(inst_[slave_idx], minimum_delay_ms_);
}
return 0;
@ -1119,4 +1098,23 @@ void ACMNetEQ::EnableAVSync(bool enable) {
}
}
int ACMNetEQ::SetMinimumDelay(int minimum_delay_ms) {
CriticalSectionScoped lock(neteq_crit_sect_);
for (int i = 0; i < num_slaves_ + 1; ++i) {
assert(is_initialized_[i]);
if (WebRtcNetEQ_SetMinimumDelay(inst_[i], minimum_delay_ms) < 0)
return -1;
}
minimum_delay_ms_ = minimum_delay_ms;
return 0;
}
int ACMNetEQ::LeastRequiredDelayMs() const {
CriticalSectionScoped lock(neteq_crit_sect_);
assert(is_initialized_[0]);
// Sufficient to query the master.
return WebRtcNetEQ_GetRequiredDelayMs(inst_[0]);
}
} // namespace webrtc


@ -129,18 +129,6 @@ class ACMNetEQ {
int32_t AllocatePacketBuffer(const WebRtcNetEQDecoder* used_codecs,
int16_t num_codecs);
//
// SetExtraDelay()
// Sets a |delay_in_ms| milliseconds extra delay in NetEQ.
//
// Input:
// - delay_in_ms : Extra delay in milliseconds.
//
// Return value : 0 if ok.
// <0 if NetEQ returned an error.
//
int32_t SetExtraDelay(const int32_t delay_in_ms);
//
// SetAVTPlayout()
// Enable/disable playout of AVT payloads.
@ -301,6 +289,20 @@ class ACMNetEQ {
//
void EnableAVSync(bool enable);
//
// Set a minimum delay in NetEq. Unless channel conditions dictate a longer
// delay, the given delay is maintained by NetEq.
//
int SetMinimumDelay(int minimum_delay_ms);
//
// The shortest latency, in milliseconds, required by the jitter buffer. This
// is computed based on inter-arrival times and the playout mode of NetEq. The
// actual delay is the maximum of the least-required delay and the minimum
// delay specified by the SetMinimumPlayoutDelay() API.
//
int LeastRequiredDelayMs() const;
private:
//
// RTPPack()
@ -365,7 +367,6 @@ class ACMNetEQ {
bool received_stereo_;
void* master_slave_info_;
AudioFrame::VADActivity previous_audio_activity_;
int32_t extra_delay_;
CriticalSectionWrapper* callback_crit_sect_;
// Minimum of "max number of packets," among all NetEq instances.
@ -376,6 +377,8 @@ class ACMNetEQ {
// Keep track of AV-sync. Just used to set the slave when a slave is added.
bool av_sync_;
int minimum_delay_ms_;
};
} // namespace webrtc
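The comment on LeastRequiredDelayMs() above spells out how the effective target delay is formed from the two values this class now tracks. A one-function sketch of that relationship (illustrative only; the helper below does not exist in the change):

// Illustrative only: the effective NetEq target delay implied by the header
// comments above.
#include <algorithm>

int EffectiveTargetDelayMs(int least_required_delay_ms, int minimum_delay_ms) {
  // NetEq honors the configured minimum unless channel conditions (reflected
  // in the least-required delay) demand more.
  return std::max(least_required_delay_ms, minimum_delay_ms);
}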


@ -137,14 +137,15 @@
'../test/RTPFile.cc',
'../test/SpatialAudio.cc',
'../test/TestAllCodecs.cc',
'../test/target_delay_unittest.cc',
'../test/Tester.cc',
'../test/TestFEC.cc',
'../test/TestStereo.cc',
'../test/TestVADDTX.cc',
'../test/TimedTrace.cc',
'../test/TwoWayCommunication.cc',
'../test/utility.cc',
'../test/initial_delay_unittest.cc',
'../test/utility.cc',
],
},
{


@ -2116,8 +2116,11 @@ int32_t AudioCodingModuleImpl::IncomingPacket(
if (av_sync_ || track_neteq_buffer_) {
last_incoming_send_timestamp_ = rtp_info.header.timestamp;
first_payload_received_ = true;
}
// Set the following regardless of tracking NetEq buffer or being in
// AV-sync mode.
first_payload_received_ = true;
}
return 0;
}
@ -2192,8 +2195,7 @@ int AudioCodingModuleImpl::InitStereoSlave() {
}
// Minimum playout delay (Used for lip-sync).
int32_t AudioCodingModuleImpl::SetMinimumPlayoutDelay(
const int32_t time_ms) {
int AudioCodingModuleImpl::SetMinimumPlayoutDelay(int time_ms) {
if ((time_ms < 0) || (time_ms > 10000)) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"Delay must be in the range of 0-10000 milliseconds.");
@ -2205,7 +2207,7 @@ int32_t AudioCodingModuleImpl::SetMinimumPlayoutDelay(
if (track_neteq_buffer_ && first_payload_received_)
return 0;
}
return neteq_.SetExtraDelay(time_ms);
return neteq_.SetMinimumDelay(time_ms);
}
// Get Dtmf playout status.
@ -2937,7 +2939,7 @@ int AudioCodingModuleImpl::SetInitialPlayoutDelay(int delay_ms) {
}
av_sync_ = true;
neteq_.EnableAVSync(av_sync_);
return neteq_.SetExtraDelay(delay_ms);
return neteq_.SetMinimumDelay(delay_ms);
}
bool AudioCodingModuleImpl::GetSilence(int desired_sample_rate_hz,
@ -3041,4 +3043,8 @@ void AudioCodingModuleImpl::UpdateBufferingSafe(const WebRtcRTPHeader& rtp_info,
initial_delay_ms_ * in_sample_rate_khz));
}
int AudioCodingModuleImpl::LeastRequiredDelayMs() const {
return std::max(neteq_.LeastRequiredDelayMs(), initial_delay_ms_);
}
} // namespace webrtc
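Putting the pieces in this file together: out-of-range requests are rejected, SetMinimumPlayoutDelay() becomes a no-op once initial-delay buffering has started, and the ACM-level least-required delay never drops below the configured initial delay. A sketch of that decision logic follows; it mirrors the code above but uses stand-in names, not the real member fields.

// Illustrative decision logic only, mirroring AudioCodingModuleImpl above;
// names are stand-ins, not the real members.
#include <algorithm>

struct AcmDelayState {
  bool track_neteq_buffer;      // True while buffering toward an initial delay.
  bool first_payload_received;  // Set once the first RTP payload arrives.
  int initial_delay_ms;         // Value from SetInitialPlayoutDelay().
  int neteq_least_required_ms;  // Least-required delay reported by NetEq.
};

// Returns true if |requested_ms| should be forwarded to NetEq as the minimum
// delay, false if the request is rejected (out of range) or ignored (an
// initial-delay buffering phase is already under way).
bool ShouldApplyMinimumDelay(const AcmDelayState& s, int requested_ms) {
  if (requested_ms < 0 || requested_ms > 10000)
    return false;  // Accepted range is 0..10000 ms.
  if (s.track_neteq_buffer && s.first_payload_received)
    return false;  // Initial-delay buffering already started; ignore.
  return true;
}

int AcmLeastRequiredDelayMs(const AcmDelayState& s) {
  return std::max(s.neteq_least_required_ms, s.initial_delay_ms);
}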


@ -167,8 +167,17 @@ class AudioCodingModuleImpl : public AudioCodingModule {
const uint8_t payload_type,
const uint32_t timestamp = 0);
// Minimum playout delay (used for lip-sync).
int32_t SetMinimumPlayoutDelay(const int32_t time_ms);
// NetEq minimum playout delay (used for lip-sync). The actual target delay
// is the max of |time_ms| and the required delay dictated by the channel.
int SetMinimumPlayoutDelay(int time_ms);
//
// The shortest latency, in milliseconds, required by the jitter buffer. This
// is computed based on inter-arrival times and the playout mode of NetEq. The
// actual delay is the maximum of the least-required delay and the minimum
// delay specified by the SetMinimumPlayoutDelay() API.
//
int LeastRequiredDelayMs() const;
// Configure Dtmf playout status, i.e., enable/disable playout of incoming
// out-of-band Dtmf tones.