Removing AudioMixerStatusReceiver and ParticipantStatistics.

BUG=webrtc:497
R=ajm@chromium.org, andrew@webrtc.org, henrikg@webrtc.org

Review URL: https://codereview.webrtc.org/1216133004 .

Cr-Commit-Position: refs/heads/master@{#9647}
Author: Minyue
Date:   2015-07-28 11:07:27 +02:00
parent d40af69278
commit 4540ffacc7
9 changed files with 3 additions and 323 deletions


@@ -21,8 +21,6 @@
'interface/audio_conference_mixer_defines.h',
'source/audio_frame_manipulator.cc',
'source/audio_frame_manipulator.h',
'source/level_indicator.cc',
'source/level_indicator.h',
'source/memory_pool.h',
'source/memory_pool_posix.h',
'source/memory_pool_win.h',


@@ -17,7 +17,6 @@
namespace webrtc {
class AudioMixerOutputReceiver;
class AudioMixerStatusReceiver;
class MixerParticipant;
class Trace;
@@ -48,12 +47,6 @@ public:
AudioMixerOutputReceiver& receiver) = 0;
virtual int32_t UnRegisterMixedStreamCallback() = 0;
// Register/unregister a callback class for receiving status information.
virtual int32_t RegisterMixerStatusCallback(
AudioMixerStatusReceiver& mixerStatusCallback,
const uint32_t amountOf10MsBetweenCallbacks) = 0;
virtual int32_t UnRegisterMixerStatusCallback() = 0;
// Add/remove participants as candidates for mixing.
virtual int32_t SetMixabilityStatus(MixerParticipant& participant,
bool mixable) = 0;


@@ -40,38 +40,6 @@ protected:
virtual ~MixerParticipant();
};
// Container struct for participant statistics.
struct ParticipantStatistics
{
int32_t participant;
int32_t level;
};
class AudioMixerStatusReceiver
{
public:
// Callback function that provides an array of ParticipantStatistics for the
// participants that were mixed last mix iteration.
virtual void MixedParticipants(
const int32_t id,
const ParticipantStatistics* participantStatistics,
const uint32_t size) = 0;
// Callback function that provides an array of the ParticipantStatistics for
// the participants that had a positive VAD last mix iteration.
virtual void VADPositiveParticipants(
const int32_t id,
const ParticipantStatistics* participantStatistics,
const uint32_t size) = 0;
// Callback function that provides the audio level of the mixed audio frame
// from the last mix iteration.
virtual void MixedAudioLevel(
const int32_t id,
const uint32_t level) = 0;
protected:
AudioMixerStatusReceiver() {}
virtual ~AudioMixerStatusReceiver() {}
};
class AudioMixerOutputReceiver
{
public:


@@ -60,12 +60,6 @@ int MaxNumChannels(const AudioFrameList* list) {
return max_num_channels;
}
void SetParticipantStatistics(ParticipantStatistics* stats,
const AudioFrame& frame) {
stats->participant = frame.id_;
stats->level = 0; // TODO(andrew): to what should this be set?
}
} // namespace
MixerParticipant::MixerParticipant()
@@ -117,17 +111,9 @@ AudioConferenceMixer* AudioConferenceMixer::Create(int id) {
}
AudioConferenceMixerImpl::AudioConferenceMixerImpl(int id)
: _scratchParticipantsToMixAmount(0),
_scratchMixedParticipants(),
_scratchVadPositiveParticipantsAmount(0),
_scratchVadPositiveParticipants(),
_id(id),
: _id(id),
_minimumMixingFreq(kLowestPossible),
_mixReceiver(NULL),
_mixerStatusCallback(NULL),
_amountOf10MsBetweenCallbacks(1),
_amountOf10MsUntilNextCallback(0),
_mixerStatusCb(false),
_outputFrequency(kDefaultFrequency),
_sampleSize(0),
_audioFramePool(NULL),
@@ -137,7 +123,6 @@ AudioConferenceMixerImpl::AudioConferenceMixerImpl(int id)
use_limiter_(true),
_timeStamp(0),
_timeScheduler(kProcessPeriodicityInMs),
_mixedAudioLevel(),
_processCalls(0) {}
bool AudioConferenceMixerImpl::Init() {
@@ -275,7 +260,6 @@ int32_t AudioConferenceMixerImpl::Process() {
GetAdditionalAudio(&additionalFramesList);
UpdateMixedStatus(mixedParticipantsMap);
_scratchParticipantsToMixAmount = mixedParticipantsMap.size();
}
// Get an AudioFrame for mixing from the memory pool.
@@ -287,9 +271,7 @@ int32_t AudioConferenceMixerImpl::Process() {
return -1;
}
bool timeForMixerCallback = false;
int retval = 0;
int32_t audioLevel = 0;
{
CriticalSectionScoped cs(_crit.get());
@@ -325,18 +307,6 @@ int32_t AudioConferenceMixerImpl::Process() {
if(!LimitMixedAudio(*mixedAudio))
retval = -1;
}
_mixedAudioLevel.ComputeLevel(mixedAudio->data_,_sampleSize);
audioLevel = _mixedAudioLevel.GetLevel();
if(_mixerStatusCb) {
_scratchVadPositiveParticipantsAmount = 0;
UpdateVADPositiveParticipants(&mixList);
if(_amountOf10MsUntilNextCallback-- == 0) {
_amountOf10MsUntilNextCallback = _amountOf10MsBetweenCallbacks;
timeForMixerCallback = true;
}
}
}
{
@@ -349,20 +319,6 @@ int32_t AudioConferenceMixerImpl::Process() {
dummy,
0);
}
if((_mixerStatusCallback != NULL) &&
timeForMixerCallback) {
_mixerStatusCallback->MixedParticipants(
_id,
_scratchMixedParticipants,
static_cast<uint32_t>(_scratchParticipantsToMixAmount));
_mixerStatusCallback->VADPositiveParticipants(
_id,
_scratchVadPositiveParticipants,
_scratchVadPositiveParticipantsAmount);
_mixerStatusCallback->MixedAudioLevel(_id,audioLevel);
}
}
// Reclaim all outstanding memory.
@@ -412,53 +368,6 @@ AudioConferenceMixerImpl::OutputFrequency() const {
return _outputFrequency;
}
int32_t AudioConferenceMixerImpl::RegisterMixerStatusCallback(
AudioMixerStatusReceiver& mixerStatusCallback,
const uint32_t amountOf10MsBetweenCallbacks) {
if(amountOf10MsBetweenCallbacks == 0) {
WEBRTC_TRACE(
kTraceWarning,
kTraceAudioMixerServer,
_id,
"amountOf10MsBetweenCallbacks(%d) needs to be larger than 0");
return -1;
}
{
CriticalSectionScoped cs(_cbCrit.get());
if(_mixerStatusCallback != NULL) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"Mixer status callback already registered");
return -1;
}
_mixerStatusCallback = &mixerStatusCallback;
}
{
CriticalSectionScoped cs(_crit.get());
_amountOf10MsBetweenCallbacks = amountOf10MsBetweenCallbacks;
_amountOf10MsUntilNextCallback = 0;
_mixerStatusCb = true;
}
return 0;
}
int32_t AudioConferenceMixerImpl::UnRegisterMixerStatusCallback() {
{
CriticalSectionScoped cs(_crit.get());
if(!_mixerStatusCb)
{
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"Mixer status callback not registered");
return -1;
}
_mixerStatusCb = false;
}
{
CriticalSectionScoped cs(_cbCrit.get());
_mixerStatusCallback = NULL;
}
return 0;
}
int32_t AudioConferenceMixerImpl::SetMixabilityStatus(
MixerParticipant& participant,
bool mixable) {
@@ -886,15 +795,6 @@ void AudioConferenceMixerImpl::UpdateVADPositiveParticipants(
iter != mixList->end();
++iter) {
CalculateEnergy(**iter);
if((*iter)->vad_activity_ == AudioFrame::kVadActive) {
_scratchVadPositiveParticipants[
_scratchVadPositiveParticipantsAmount].participant =
(*iter)->id_;
// TODO(andrew): to what should this be set?
_scratchVadPositiveParticipants[
_scratchVadPositiveParticipantsAmount].level = 0;
_scratchVadPositiveParticipantsAmount++;
}
}
}
@@ -977,9 +877,6 @@ int32_t AudioConferenceMixerImpl::MixFromList(
}
MixFrames(&mixedAudio, (*iter), use_limiter_);
SetParticipantStatistics(&_scratchMixedParticipants[position],
**iter);
position++;
}


@@ -17,7 +17,6 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h"
#include "webrtc/modules/audio_conference_mixer/source/level_indicator.h"
#include "webrtc/modules/audio_conference_mixer/source/memory_pool.h"
#include "webrtc/modules/audio_conference_mixer/source/time_scheduler.h"
#include "webrtc/modules/interface/module_common_types.h"
@@ -71,10 +70,6 @@ public:
int32_t RegisterMixedStreamCallback(
AudioMixerOutputReceiver& mixReceiver) override;
int32_t UnRegisterMixedStreamCallback() override;
int32_t RegisterMixerStatusCallback(
AudioMixerStatusReceiver& mixerStatusCallback,
const uint32_t amountOf10MsBetweenCallbacks) override;
int32_t UnRegisterMixerStatusCallback() override;
int32_t SetMixabilityStatus(MixerParticipant& participant,
bool mixable) override;
int32_t MixabilityStatus(MixerParticipant& participant,
@@ -93,8 +88,7 @@ private:
Frequency OutputFrequency() const;
// Fills mixList with the AudioFrames pointers that should be used when
// mixing. Fills mixParticipantList with ParticipantStatistics for the
// participants whose AudioFrames are inside mixList.
// mixing.
// maxAudioFrameCounter both input and output specifies how many more
// AudioFrames that are allowed to be mixed.
// rampOutList contain AudioFrames corresponding to an audio stream that
@@ -154,16 +148,6 @@ private:
bool LimitMixedAudio(AudioFrame& mixedAudio);
// Scratch memory
// Note that the scratch memory may only be touched in the scope of
// Process().
size_t _scratchParticipantsToMixAmount;
ParticipantStatistics _scratchMixedParticipants[
kMaximumAmountOfMixedParticipants];
uint32_t _scratchVadPositiveParticipantsAmount;
ParticipantStatistics _scratchVadPositiveParticipants[
kMaximumAmountOfMixedParticipants];
rtc::scoped_ptr<CriticalSectionWrapper> _crit;
rtc::scoped_ptr<CriticalSectionWrapper> _cbCrit;
@@ -174,11 +158,6 @@ private:
// Mix result callback
AudioMixerOutputReceiver* _mixReceiver;
AudioMixerStatusReceiver* _mixerStatusCallback;
uint32_t _amountOf10MsBetweenCallbacks;
uint32_t _amountOf10MsUntilNextCallback;
bool _mixerStatusCb;
// The current sample frequency and sample size when mixing.
Frequency _outputFrequency;
uint16_t _sampleSize;
@@ -201,9 +180,6 @@ private:
// Metronome class.
TimeScheduler _timeScheduler;
// Smooth level indicator.
LevelIndicator _mixedAudioLevel;
// Counter keeping track of concurrent calls to process.
// Note: should never be higher than 1 or lower than 0.
int16_t _processCalls;


@@ -1,76 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/audio_conference_mixer/source/level_indicator.h"
namespace webrtc {
// Array for adding smoothing to level changes (ad-hoc).
const uint32_t perm[] =
{0,1,2,3,4,4,5,5,5,5,6,6,6,6,6,7,7,7,7,8,8,8,9,9,9,9,9,9,9,9,9,9,9};
LevelIndicator::LevelIndicator()
: _max(0),
_count(0),
_currentLevel(0)
{
}
LevelIndicator::~LevelIndicator()
{
}
// Level is based on the highest absolute value for all samples.
void LevelIndicator::ComputeLevel(const int16_t* speech,
const uint16_t nrOfSamples)
{
int32_t min = 0;
for(uint32_t i = 0; i < nrOfSamples; i++)
{
if(_max < speech[i])
{
_max = speech[i];
}
if(min > speech[i])
{
min = speech[i];
}
}
// Absolute max value.
if(-min > _max)
{
_max = -min;
}
if(_count == TICKS_BEFORE_CALCULATION)
{
// Highest sample value maps directly to a level.
int32_t position = _max / 1000;
if ((position == 0) &&
(_max > 250))
{
position = 1;
}
_currentLevel = perm[position];
// The max value is decayed and stored so that it can be reused to slow
// down decreases in level.
_max = _max >> 1;
_count = 0;
} else {
_count++;
}
}
int32_t LevelIndicator::GetLevel()
{
return _currentLevel;
}
} // namespace webrtc


@@ -1,37 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_LEVEL_INDICATOR_H_
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_LEVEL_INDICATOR_H_
#include "webrtc/typedefs.h"
namespace webrtc {
class LevelIndicator
{
public:
enum{TICKS_BEFORE_CALCULATION = 10};
LevelIndicator();
~LevelIndicator();
// Updates the level.
void ComputeLevel(const int16_t* speech,
const uint16_t nrOfSamples);
int32_t GetLevel();
private:
int32_t _max;
uint32_t _count;
uint32_t _currentLevel;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_LEVEL_INDICATOR_H_
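For reference, the deleted LevelIndicator tracks the peak absolute sample value over ten 10 ms blocks (roughly 100 ms), maps that peak onto a 0-9 scale through the ad-hoc perm table, and halves the stored peak on each update so that downward level changes are smoothed. The following is a minimal standalone sketch of that idea, not the deleted class itself; the class name, the 16 kHz frame size, and the use of std::abs are illustrative assumptions.

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    // Illustrative sketch (hypothetical names): keep the peak |sample| over
    // several 10 ms blocks, map it to a coarse 0-9 level, then decay the
    // stored peak so the indicated level falls gradually.
    class SimpleLevelIndicator {
     public:
      static const int kBlocksPerUpdate = 10;  // ~100 ms of 10 ms blocks

      void ComputeLevel(const int16_t* speech, size_t num_samples) {
        for (size_t i = 0; i < num_samples; ++i)
          max_ = std::max(max_, static_cast<int32_t>(std::abs(speech[i])));
        if (++count_ < kBlocksPerUpdate)
          return;
        // Same ad-hoc mapping table as the removed code.
        static const int32_t perm[] = {0,1,2,3,4,4,5,5,5,5,6,6,6,6,6,7,7,
                                       7,7,8,8,8,9,9,9,9,9,9,9,9,9,9,9};
        int32_t position = max_ / 1000;
        if (position == 0 && max_ > 250)
          position = 1;  // quiet but audible signals still register as 1
        level_ = perm[position];
        max_ >>= 1;   // decayed peak slows down decreases in level
        count_ = 0;
      }

      int32_t GetLevel() const { return level_; }

     private:
      int32_t max_ = 0;
      int count_ = 0;
      int32_t level_ = 0;
    };

    int main() {
      SimpleLevelIndicator indicator;
      int16_t frame[160] = {0};  // one 10 ms frame at 16 kHz
      frame[0] = 4000;           // pretend peak sample
      for (int i = 0; i < 10; ++i)
        indicator.ComputeLevel(frame, 160);
      std::printf("level = %d\n", indicator.GetLevel());  // prints "level = 4"
    }

The removed class tracked max and min separately instead of taking the absolute value and exposed the update period as the TICKS_BEFORE_CALCULATION enum, but the mapping table and peak decay are the same.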


@@ -35,29 +35,6 @@ OutputMixer::NewMixedAudio(int32_t id,
_audioFrame.id_ = id;
}
void OutputMixer::MixedParticipants(
int32_t id,
const ParticipantStatistics* participantStatistics,
uint32_t size)
{
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::MixedParticipants(id=%d, size=%u)", id, size);
}
void OutputMixer::VADPositiveParticipants(int32_t id,
const ParticipantStatistics* participantStatistics, uint32_t size)
{
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::VADPositiveParticipants(id=%d, size=%u)",
id, size);
}
void OutputMixer::MixedAudioLevel(int32_t id, uint32_t level)
{
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::MixedAudioLevel(id=%d, level=%u)", id, level);
}
void OutputMixer::PlayNotification(int32_t id, uint32_t durationMs)
{
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
@@ -131,8 +108,7 @@ OutputMixer::OutputMixer(uint32_t instanceId) :
WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::OutputMixer() - ctor");
if ((_mixerModule.RegisterMixedStreamCallback(*this) == -1) ||
(_mixerModule.RegisterMixerStatusCallback(*this, 100) == -1))
if (_mixerModule.RegisterMixedStreamCallback(*this) == -1)
{
WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::OutputMixer() failed to register mixer"
@@ -170,7 +146,6 @@ OutputMixer::~OutputMixer()
_outputFileRecorderPtr = NULL;
}
}
_mixerModule.UnRegisterMixerStatusCallback();
_mixerModule.UnRegisterMixedStreamCallback();
delete &_mixerModule;
delete &_callbackCritSect;


@@ -32,7 +32,6 @@ namespace voe {
class Statistics;
class OutputMixer : public AudioMixerOutputReceiver,
public AudioMixerStatusReceiver,
public FileCallback
{
public:
@@ -93,19 +92,6 @@ public:
const AudioFrame** uniqueAudioFrames,
uint32_t size);
// from AudioMixerStatusReceiver
virtual void MixedParticipants(
int32_t id,
const ParticipantStatistics* participantStatistics,
uint32_t size);
virtual void VADPositiveParticipants(
int32_t id,
const ParticipantStatistics* participantStatistics,
uint32_t size);
virtual void MixedAudioLevel(int32_t id, uint32_t level);
// For file recording
void PlayNotification(int32_t id, uint32_t durationMs);