Fix Chromium clang plugin warnings

NOTRY=true
BUG=webrtc:163

Review-Url: https://codereview.webrtc.org/2288153002
Cr-Commit-Position: refs/heads/master@{#13964}
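
The warnings come from Chromium's find_bad_constructs clang plugin, which objects to complex constructors and destructors defined inline in headers. The fix throughout this CL is to declare them in the header and define them in the .cc file, instead of suppressing the plugin in BUILD.gn. A minimal sketch of the pattern, using hypothetical widget.h/widget.cc names that are not part of this CL:

// widget.h (before): the inline constructor can be flagged as "complex"
// because of the non-trivial std::vector member.
//   struct Widget {
//     Widget() : samples_(48000, 0.f) {}
//     std::vector<float> samples_;
//   };

// widget.h (after): declarations only.
#include <vector>

struct Widget {
  Widget();
  ~Widget();
  std::vector<float> samples_;
};

// widget.cc (after): out-of-line definitions keep the header cheap.
#include "widget.h"

Widget::Widget() : samples_(48000, 0.f) {}
Widget::~Widget() = default;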
@@ -239,11 +239,6 @@ source_set("audio_processing") {
   # TODO(jschuh): Bug 1348: fix this warning.
   configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
-
-  if (is_clang) {
-    # Suppress warnings from the Chromium Clang plugins (bugs.webrtc.org/163).
-    configs -= [ "//build/config/clang:find_bad_constructs" ]
-  }
 
   deps += [
     "../../base:rtc_base_approved",
     "../../common_audio",

@@ -28,6 +28,9 @@ extern "C" {
 
 namespace webrtc {
 
+Aec::Aec() = default;
+Aec::~Aec() = default;
+
 // Measured delays [ms]
 // Device  Chrome  GTP
 // MacBook Air  10

@@ -65,6 +65,9 @@ struct AecCore;
 class ApmDataDumper;
 
 typedef struct Aec {
+  Aec();
+  ~Aec();
+
   std::unique_ptr<ApmDataDumper> data_dumper;
 
   int delayCtr;

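Besides satisfying the plugin, declaring Aec() and ~Aec() here and defining them as = default in the .cc file lets this header keep ApmDataDumper as a forward declaration: std::unique_ptr<T> only needs T to be a complete type where the constructor and destructor are actually defined. A small illustration of that idiom with hypothetical Foo/Impl names:

// foo.h
#include <memory>

class Impl;  // forward declaration is sufficient in the header

class Foo {
 public:
  Foo();
  ~Foo();  // declared here, defined where Impl is complete

 private:
  std::unique_ptr<Impl> impl_;
};

// foo.cc
#include "foo.h"
#include "impl.h"  // Impl is a complete type from here on

Foo::Foo() = default;
Foo::~Foo() = default;
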
@@ -278,6 +278,20 @@ int AudioProcessingImpl::MaybeInitializeCapture(
   return MaybeInitialize(processing_config);
 }
 
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+
+AudioProcessingImpl::ApmDebugDumpThreadState::ApmDebugDumpThreadState()
+    : event_msg(new audioproc::Event()) {}
+
+AudioProcessingImpl::ApmDebugDumpThreadState::~ApmDebugDumpThreadState() {}
+
+AudioProcessingImpl::ApmDebugDumpState::ApmDebugDumpState()
+    : debug_file(FileWrapper::Create()) {}
+
+AudioProcessingImpl::ApmDebugDumpState::~ApmDebugDumpState() {}
+
+#endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
+
 // Calls InitializeLocked() if any of the audio parameters have changed from
 // their current values (needs to be called while holding the crit_render_lock).
 int AudioProcessingImpl::MaybeInitialize(

@@ -1524,4 +1538,28 @@ int AudioProcessingImpl::WriteConfigMessage(bool forced) {
 }
 #endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
 
+AudioProcessingImpl::ApmCaptureState::ApmCaptureState(
+    bool transient_suppressor_enabled,
+    const std::vector<Point>& array_geometry,
+    SphericalPointf target_direction)
+    : aec_system_delay_jumps(-1),
+      delay_offset_ms(0),
+      was_stream_delay_set(false),
+      last_stream_delay_ms(0),
+      last_aec_system_delay_ms(0),
+      stream_delay_jumps(-1),
+      output_will_be_muted(false),
+      key_pressed(false),
+      transient_suppressor_enabled(transient_suppressor_enabled),
+      array_geometry(array_geometry),
+      target_direction(target_direction),
+      fwd_proc_format(kSampleRate16kHz),
+      split_rate(kSampleRate16kHz) {}
+
+AudioProcessingImpl::ApmCaptureState::~ApmCaptureState() = default;
+
+AudioProcessingImpl::ApmRenderState::ApmRenderState() = default;
+
+AudioProcessingImpl::ApmRenderState::~ApmRenderState() = default;
+
 }  // namespace webrtc

@@ -45,7 +45,7 @@ class AudioProcessingImpl : public AudioProcessing {
   explicit AudioProcessingImpl(const Config& config);
   // AudioProcessingImpl takes ownership of beamformer.
   AudioProcessingImpl(const Config& config, NonlinearBeamformer* beamformer);
-  virtual ~AudioProcessingImpl();
+  ~AudioProcessingImpl() override;
   int Initialize() override;
   int Initialize(int input_sample_rate_hz,
                  int output_sample_rate_hz,

@@ -133,7 +133,8 @@ class AudioProcessingImpl : public AudioProcessing {
 #ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
   // State for the debug dump.
   struct ApmDebugDumpThreadState {
-    ApmDebugDumpThreadState() : event_msg(new audioproc::Event()) {}
+    ApmDebugDumpThreadState();
+    ~ApmDebugDumpThreadState();
     std::unique_ptr<audioproc::Event> event_msg;  // Protobuf message.
     std::string event_str;  // Memory for protobuf serialization.

@@ -142,7 +143,8 @@ class AudioProcessingImpl : public AudioProcessing {
   };
 
   struct ApmDebugDumpState {
-    ApmDebugDumpState() : debug_file(FileWrapper::Create()) {}
+    ApmDebugDumpState();
+    ~ApmDebugDumpState();
     // Number of bytes that can still be written to the log before the maximum
     // size is reached. A value of <= 0 indicates that no limit is used.
     int64_t num_bytes_left_for_log_ = -1;

@@ -287,20 +289,8 @@ class AudioProcessingImpl : public AudioProcessing {
   struct ApmCaptureState {
     ApmCaptureState(bool transient_suppressor_enabled,
                     const std::vector<Point>& array_geometry,
-                    SphericalPointf target_direction)
-        : aec_system_delay_jumps(-1),
-          delay_offset_ms(0),
-          was_stream_delay_set(false),
-          last_stream_delay_ms(0),
-          last_aec_system_delay_ms(0),
-          stream_delay_jumps(-1),
-          output_will_be_muted(false),
-          key_pressed(false),
-          transient_suppressor_enabled(transient_suppressor_enabled),
-          array_geometry(array_geometry),
-          target_direction(target_direction),
-          fwd_proc_format(kSampleRate16kHz),
-          split_rate(kSampleRate16kHz) {}
+                    SphericalPointf target_direction);
+    ~ApmCaptureState();
     int aec_system_delay_jumps;
     int delay_offset_ms;
     bool was_stream_delay_set;

@@ -342,6 +332,8 @@ class AudioProcessingImpl : public AudioProcessing {
   } capture_nonlocked_;
 
   struct ApmRenderState {
+    ApmRenderState();
+    ~ApmRenderState();
     std::unique_ptr<AudioConverter> render_converter;
     std::unique_ptr<AudioBuffer> render_audio;
   } render_ GUARDED_BY(crit_render_);

@@ -28,7 +28,7 @@ class EchoCancellationImpl : public EchoCancellation {
  public:
   EchoCancellationImpl(rtc::CriticalSection* crit_render,
                        rtc::CriticalSection* crit_capture);
-  virtual ~EchoCancellationImpl();
+  ~EchoCancellationImpl() override;
 
   int ProcessRenderAudio(const AudioBuffer* audio);
   int ProcessCaptureAudio(AudioBuffer* audio, int stream_delay_ms);

@@ -29,7 +29,7 @@ class EchoControlMobileImpl : public EchoControlMobile {
   EchoControlMobileImpl(rtc::CriticalSection* crit_render,
                         rtc::CriticalSection* crit_capture);
 
-  virtual ~EchoControlMobileImpl();
+  ~EchoControlMobileImpl() override;
 
   int ProcessRenderAudio(const AudioBuffer* audio);
   int ProcessCaptureAudio(AudioBuffer* audio, int stream_delay_ms);

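The destructor edits above (and the matching one in AudioProcessingImpl) address the related Chromium style check that an overriding destructor should be marked override rather than re-declared virtual; the compiler can then verify that a virtual destructor really exists in the base. A minimal example with placeholder Base/Derived names:

class Base {
 public:
  virtual ~Base() = default;
};

class Derived : public Base {
 public:
  ~Derived() override = default;  // preferred over "virtual ~Derived();"
};
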
@@ -93,6 +93,12 @@ webrtc::SignalClassifier::SignalType ClassifySignal(
 
 }  // namespace
 
+SignalClassifier::FrameExtender::FrameExtender(size_t frame_size,
+                                               size_t extended_frame_size)
+    : x_old_(extended_frame_size - frame_size, 0.f) {}
+
+SignalClassifier::FrameExtender::~FrameExtender() = default;
+
 void SignalClassifier::FrameExtender::ExtendFrame(
     rtc::ArrayView<const float> x,
     rtc::ArrayView<float> x_extended) {

@@ -37,8 +37,8 @@ class SignalClassifier {
  private:
   class FrameExtender {
   public:
-    FrameExtender(size_t frame_size, size_t extended_frame_size)
-        : x_old_(extended_frame_size - frame_size, 0.f) {}
+    FrameExtender(size_t frame_size, size_t extended_frame_size);
+    ~FrameExtender();
 
     void ExtendFrame(rtc::ArrayView<const float> x,
                      rtc::ArrayView<float> x_extended);