Change NetEq::GetPlayoutTimestamp to return an rtc::Optional<uint32_t>

This is in preparation for changes to when the playout timestamp is
valid.

BUG=webrtc:5669

Review URL: https://codereview.webrtc.org/1853183002

Cr-Commit-Position: refs/heads/master@{#12256}
henrik.lundin
2016-04-06 01:39:22 -07:00
committed by Commit bot
parent 05255b0e8a
commit 9a410dd082
13 changed files with 77 additions and 58 deletions
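For orientation, here is a minimal sketch of what the signature change means for callers, migrating from the old bool-plus-output-parameter form to the new rtc::Optional<uint32_t> return; the surrounding code and the UseTimestamp() consumer are hypothetical and only illustrate the shape of the change:

    // Before this commit: validity is signalled by the bool return value.
    uint32_t ts = 0;
    if (neteq->GetPlayoutTimestamp(&ts)) {
      UseTimestamp(ts);  // hypothetical consumer
    }

    // After this commit: the optional carries both the value and its validity.
    rtc::Optional<uint32_t> ts = neteq->GetPlayoutTimestamp();
    if (ts) {
      UseTimestamp(*ts);
    }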

View File

@@ -196,9 +196,10 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
   // |GetPlayoutTimestamp|, which is the timestamp of the last sample of
   // |audio_frame|.
   // TODO(henrik.lundin) Move setting of audio_frame->timestamp_ inside NetEq.
-  uint32_t playout_timestamp = 0;
-  if (GetPlayoutTimestamp(&playout_timestamp)) {
-    audio_frame->timestamp_ = playout_timestamp -
+  rtc::Optional<uint32_t> playout_timestamp = GetPlayoutTimestamp();
+  if (playout_timestamp) {
+    audio_frame->timestamp_ =
+        *playout_timestamp -
         static_cast<uint32_t>(audio_frame->samples_per_channel_);
   } else {
     // Remain 0 until we have a valid |playout_timestamp|.
@@ -318,8 +319,8 @@ int AcmReceiver::RemoveCodec(uint8_t payload_type) {
   return 0;
 }

-bool AcmReceiver::GetPlayoutTimestamp(uint32_t* timestamp) {
-  return neteq_->GetPlayoutTimestamp(timestamp);
+rtc::Optional<uint32_t> AcmReceiver::GetPlayoutTimestamp() {
+  return neteq_->GetPlayoutTimestamp();
 }

 int AcmReceiver::LastAudioCodec(CodecInst* codec) const {
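A note on the arithmetic in the GetAudio() hunk above: |playout_timestamp| is the RTP timestamp of the last sample in |audio_frame|, so subtracting samples_per_channel_ yields the timestamp of the frame's first sample, and the unsigned uint32_t subtraction stays well defined across RTP timestamp wrap-around. For example, assuming a 10 ms frame at 48 kHz (480 samples per channel), audio_frame->timestamp_ ends up 480 ticks behind the playout timestamp.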

View File

@@ -195,11 +195,9 @@ class AcmReceiver {
   //
   int RemoveAllCodecs();

-  //
-  // Gets the RTP timestamp of the last sample delivered by GetAudio().
-  // Returns true if the RTP timestamp is valid, otherwise false.
-  //
-  bool GetPlayoutTimestamp(uint32_t* timestamp);
+  // Returns the RTP timestamp for the last sample delivered by GetAudio().
+  // The return value will be empty if no valid timestamp is available.
+  rtc::Optional<uint32_t> GetPlayoutTimestamp();

   //
   // Get the audio codec associated with the last non-CNG/non-DTMF received

View File

@@ -902,8 +902,16 @@ int AudioCodingModuleImpl::DisableOpusDtx() {
   return encoder_stack_->SetDtx(false) ? 0 : -1;
 }

-int AudioCodingModuleImpl::PlayoutTimestamp(uint32_t* timestamp) {
-  return receiver_.GetPlayoutTimestamp(timestamp) ? 0 : -1;
+int32_t AudioCodingModuleImpl::PlayoutTimestamp(uint32_t* timestamp) {
+  rtc::Optional<uint32_t> ts = PlayoutTimestamp();
+  if (!ts)
+    return -1;
+  *timestamp = *ts;
+  return 0;
 }
+
+rtc::Optional<uint32_t> AudioCodingModuleImpl::PlayoutTimestamp() {
+  return receiver_.GetPlayoutTimestamp();
+}

 bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const {

View File

@@ -157,8 +157,9 @@ class AudioCodingModuleImpl final : public AudioCodingModule {
   // Smallest latency NetEq will maintain.
   int LeastRequiredDelayMs() const override;

   // Get playout timestamp.
-  int PlayoutTimestamp(uint32_t* timestamp) override;
+  RTC_DEPRECATED int32_t PlayoutTimestamp(uint32_t* timestamp) override;
+  rtc::Optional<uint32_t> PlayoutTimestamp() override;

   // Get 10 milliseconds of raw audio data to play out, and
   // automatic resample to the requested frequency if > 0.

View File

@@ -14,6 +14,7 @@
 #include <string>
 #include <vector>

 #include "webrtc/base/deprecation.h"
+#include "webrtc/base/optional.h"
 #include "webrtc/common_types.h"
 #include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
@@ -647,7 +648,6 @@ class AudioCodingModule {
   //
   virtual int LeastRequiredDelayMs() const = 0;
-
   ///////////////////////////////////////////////////////////////////////////
   // int32_t PlayoutTimestamp()
   // The send timestamp of an RTP packet is associated with the decoded
   // audio of the packet in question. This function returns the timestamp of
@@ -660,8 +660,16 @@ class AudioCodingModule {
   //   0 if the output is a correct timestamp.
   //  -1 if failed to output the correct timestamp.
   //
-  // TODO(tlegrand): Change function to return the timestamp.
-  virtual int32_t PlayoutTimestamp(uint32_t* timestamp) = 0;
+  RTC_DEPRECATED virtual int32_t PlayoutTimestamp(uint32_t* timestamp) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // int32_t PlayoutTimestamp()
+  // The send timestamp of an RTP packet is associated with the decoded
+  // audio of the packet in question. This function returns the timestamp of
+  // the latest audio obtained by calling PlayoutData10ms(), or empty if no
+  // valid timestamp is available.
+  //
+  virtual rtc::Optional<uint32_t> PlayoutTimestamp() = 0;

   ///////////////////////////////////////////////////////////////////////////
   // int32_t PlayoutData10Ms(
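Callers that previously treated the -1 return as "keep the previous value" map naturally onto value_or(); a short hedged sketch, where the AudioCodingModule* named acm and the cached last_playout_ts are hypothetical:

    rtc::Optional<uint32_t> ts = acm->PlayoutTimestamp();
    // Fall back to the cached value while NetEq has not yet decoded a packet.
    last_playout_ts = ts.value_or(last_playout_ts);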

View File

@@ -16,6 +16,7 @@
 #include <string>

 #include "webrtc/base/constructormagic.h"
+#include "webrtc/base/optional.h"
 #include "webrtc/common_types.h"
 #include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
 #include "webrtc/typedefs.h"
@@ -243,9 +244,9 @@ class NetEq {
   // Disables post-decode VAD.
   virtual void DisableVad() = 0;

-  // Gets the RTP timestamp for the last sample delivered by GetAudio().
-  // Returns true if the RTP timestamp is valid, otherwise false.
-  virtual bool GetPlayoutTimestamp(uint32_t* timestamp) = 0;
+  // Returns the RTP timestamp for the last sample delivered by GetAudio().
+  // The return value will be empty if no valid timestamp is available.
+  virtual rtc::Optional<uint32_t> GetPlayoutTimestamp() = 0;

   // Returns the sample rate in Hz of the audio produced in the last GetAudio
   // call. If GetAudio has not been called yet, the configured sample rate
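For readers unfamiliar with rtc::Optional (webrtc/base/optional.h), the parts of its interface this change relies on are small; a minimal caller-side sketch, with neteq and Process() as hypothetical stand-ins:

    rtc::Optional<uint32_t> ts = neteq->GetPlayoutTimestamp();
    if (ts) {            // operator bool: true only when a value is present
      Process(*ts);      // operator*: access the contained value
    }
    uint32_t fallback = ts.value_or(0);  // value_or: default when empty

An empty optional is constructed as rtc::Optional<uint32_t>() and a populated one as rtc::Optional<uint32_t>(value), as the NetEqImpl change below does.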

View File

@@ -401,15 +401,15 @@ void NetEqImpl::DisableVad() {
   vad_->Disable();
 }

-bool NetEqImpl::GetPlayoutTimestamp(uint32_t* timestamp) {
+rtc::Optional<uint32_t> NetEqImpl::GetPlayoutTimestamp() {
   rtc::CritScope lock(&crit_sect_);
   if (first_packet_) {
     // We don't have a valid RTP timestamp until we have decoded our first
     // RTP packet.
-    return false;
+    return rtc::Optional<uint32_t>();
   }
-  *timestamp = timestamp_scaler_->ToExternal(playout_timestamp_);
-  return true;
+  return rtc::Optional<uint32_t>(
+      timestamp_scaler_->ToExternal(playout_timestamp_));
 }

 int NetEqImpl::last_output_sample_rate_hz() const {

View File

@@ -160,7 +160,7 @@ class NetEqImpl : public webrtc::NetEq {
   // Disables post-decode VAD.
   void DisableVad() override;

-  bool GetPlayoutTimestamp(uint32_t* timestamp) override;
+  rtc::Optional<uint32_t> GetPlayoutTimestamp() override;

   int last_output_sample_rate_hz() const override;

View File

@@ -478,11 +478,11 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) {
   // The value of the last of the output samples is the same as the number of
   // samples played from the decoded packet. Thus, this number + the RTP
   // timestamp should match the playout timestamp.
-  uint32_t timestamp = 0;
-  EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&timestamp));
-  EXPECT_EQ(rtp_header.header.timestamp +
-                output.data_[output.samples_per_channel_ - 1],
-            timestamp);
+  // Wrap the expected value in an rtc::Optional to compare them as such.
+  EXPECT_EQ(
+      rtc::Optional<uint32_t>(rtp_header.header.timestamp +
+                              output.data_[output.samples_per_channel_ - 1]),
+      neteq_->GetPlayoutTimestamp());

   // Check the timestamp for the last value in the sync buffer. This should
   // be one full frame length ahead of the RTP timestamp.
@@ -714,8 +714,6 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
   const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateKhz);

   AudioFrame output;
-  uint32_t timestamp;
-  uint32_t last_timestamp;
   AudioFrame::SpeechType expected_type[8] = {
       AudioFrame::kNormalSpeech, AudioFrame::kNormalSpeech,
       AudioFrame::kCNG, AudioFrame::kCNG,
@@ -731,16 +729,19 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
   };

   EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
-  EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&last_timestamp));
+  rtc::Optional<uint32_t> last_timestamp = neteq_->GetPlayoutTimestamp();
+  ASSERT_TRUE(last_timestamp);

   for (size_t i = 1; i < 6; ++i) {
     ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
     EXPECT_EQ(1u, output.num_channels_);
     EXPECT_EQ(expected_type[i - 1], output.speech_type_);
-    EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&timestamp));
+    rtc::Optional<uint32_t> timestamp = neteq_->GetPlayoutTimestamp();
+    EXPECT_TRUE(timestamp);

     EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
-    EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&timestamp));
-    EXPECT_EQ(timestamp, last_timestamp + expected_timestamp_increment[i]);
+    timestamp = neteq_->GetPlayoutTimestamp();
+    ASSERT_TRUE(timestamp);
+    EXPECT_EQ(*timestamp, *last_timestamp + expected_timestamp_increment[i]);
     last_timestamp = timestamp;
   }
@@ -756,8 +757,9 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
     EXPECT_EQ(1u, output.num_channels_);
     EXPECT_EQ(expected_type[i - 1], output.speech_type_);

     EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
-    EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&timestamp));
-    EXPECT_EQ(timestamp, last_timestamp + expected_timestamp_increment[i]);
+    rtc::Optional<uint32_t> timestamp = neteq_->GetPlayoutTimestamp();
+    ASSERT_TRUE(timestamp);
+    EXPECT_EQ(*timestamp, *last_timestamp + expected_timestamp_increment[i]);
     last_timestamp = timestamp;
   }

View File

@@ -1522,9 +1522,9 @@ void NetEqDecodingTest::DuplicateCng() {
 }

 uint32_t NetEqDecodingTest::PlayoutTimestamp() {
-  uint32_t playout_timestamp = 0;
-  EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&playout_timestamp));
-  return playout_timestamp;
+  rtc::Optional<uint32_t> playout_timestamp = neteq_->GetPlayoutTimestamp();
+  EXPECT_TRUE(playout_timestamp);
+  return playout_timestamp.value_or(0);
 }

 TEST_F(NetEqDecodingTest, DiscardDuplicateCng) { DuplicateCng(); }
@@ -1570,5 +1570,4 @@ TEST_F(NetEqDecodingTest, CngFirst) {
   // Verify speech output.
   EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
 }
-
 }  // namespace webrtc

View File

@@ -666,7 +666,6 @@ void APITest::TestDelay(char side) {
   EventTimerWrapper* myEvent = EventTimerWrapper::Create();

   uint32_t inTimestamp = 0;
-  uint32_t outTimestamp = 0;
   double estimDelay = 0;
   double averageEstimDelay = 0;
@@ -688,7 +687,8 @@ void APITest::TestDelay(char side) {
   CHECK_ERROR_MT(myACM->SetMinimumPlayoutDelay(*myMinDelay));

   inTimestamp = myChannel->LastInTimestamp();
-  CHECK_ERROR_MT(myACM->PlayoutTimestamp(&outTimestamp));
+  rtc::Optional<uint32_t> outTimestamp = myACM->PlayoutTimestamp();
+  CHECK_ERROR_MT(outTimestamp ? 0 : -1);

   if (!_randomTest) {
     myEvent->StartTimer(true, 30);
@@ -698,11 +698,12 @@ void APITest::TestDelay(char side) {
     myEvent->Wait(1000);

     inTimestamp = myChannel->LastInTimestamp();
-    CHECK_ERROR_MT(myACM->PlayoutTimestamp(&outTimestamp));
+    outTimestamp = myACM->PlayoutTimestamp();
+    CHECK_ERROR_MT(outTimestamp ? 0 : -1);

     //std::cout << outTimestamp << std::endl << std::flush;
-    estimDelay = (double) ((uint32_t)(inTimestamp - outTimestamp))
-        / ((double) myACM->ReceiveFrequency() / 1000.0);
+    estimDelay = (double)((uint32_t)(inTimestamp - *outTimestamp)) /
+                 ((double)myACM->ReceiveFrequency() / 1000.0);

     estimDelayCB.Update(estimDelay);
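To make the estimDelay arithmetic concrete: the timestamp difference is in RTP ticks, and dividing by ReceiveFrequency() / 1000 converts ticks to milliseconds. For example, assuming a 48000 Hz receive frequency, a gap of 960 ticks between the last received and the last played-out timestamp corresponds to 960 / 48 = 20 ms. The cast through uint32_t keeps the subtraction well defined if the RTP timestamp wraps.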

View File

@@ -180,7 +180,6 @@ class DelayTest {
     int num_frames = 0;
     int in_file_frames = 0;
-    uint32_t playout_ts;
     uint32_t received_ts;
     double average_delay = 0;
     double inst_delay_sec = 0;
@@ -209,10 +208,11 @@ class DelayTest {
       out_file_b_.Write10MsData(
           audio_frame.data_,
           audio_frame.samples_per_channel_ * audio_frame.num_channels_);
-      acm_b_->PlayoutTimestamp(&playout_ts);
       received_ts = channel_a2b_->LastInTimestamp();
-      inst_delay_sec = static_cast<uint32_t>(received_ts - playout_ts)
-          / static_cast<double>(encoding_sample_rate_hz_);
+      rtc::Optional<uint32_t> playout_timestamp = acm_b_->PlayoutTimestamp();
+      ASSERT_TRUE(playout_timestamp);
+      inst_delay_sec = static_cast<uint32_t>(received_ts - *playout_timestamp) /
+                       static_cast<double>(encoding_sample_rate_hz_);

       if (num_frames > 10)
         average_delay = 0.95 * average_delay + 0.05 * inst_delay_sec;
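The average_delay update above is an exponentially weighted moving average with a smoothing factor of 0.05: each new inst_delay_sec contributes 5% while the running average keeps 95%, so single-frame spikes are damped but a sustained change in delay still dominates after a few dozen frames.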

View File

@@ -3288,9 +3288,9 @@ int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
 }

 void Channel::UpdatePlayoutTimestamp(bool rtcp) {
-  uint32_t playout_timestamp = 0;
-  if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1) {
+  rtc::Optional<uint32_t> playout_timestamp = audio_coding_->PlayoutTimestamp();
+  if (!playout_timestamp) {
     // This can happen if this channel has not been received any RTP packet. In
     // this case, NetEq is not capable of computing playout timestamp.
     return;
@@ -3307,21 +3307,21 @@ void Channel::UpdatePlayoutTimestamp(bool rtcp) {
     return;
   }

-  jitter_buffer_playout_timestamp_ = playout_timestamp;
+  jitter_buffer_playout_timestamp_ = *playout_timestamp;

   // Remove the playout delay.
-  playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
+  *playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));

   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
                "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
-               playout_timestamp);
+               *playout_timestamp);

   {
     rtc::CritScope lock(&video_sync_lock_);
     if (rtcp) {
-      playout_timestamp_rtcp_ = playout_timestamp;
+      playout_timestamp_rtcp_ = *playout_timestamp;
     } else {
-      playout_timestamp_rtp_ = playout_timestamp;
+      playout_timestamp_rtp_ = *playout_timestamp;
     }
     playout_delay_ms_ = delay_ms;
   }
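A short note on the delay removal above: delay_ms * (GetPlayoutFrequency() / 1000) converts the jitter-buffer delay from milliseconds into RTP timestamp ticks before it is subtracted, e.g. a 60 ms delay at a 48000 Hz playout frequency moves the timestamp back by 60 * 48 = 2880 ticks. Since the division by 1000 is integer division, the conversion assumes the playout frequency is a multiple of 1000 Hz, which holds for the usual audio rates.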