Enable clang::find_bad_constructs for sdk/ (part 1).

This CL removes //build/config/clang:find_bad_constructs from the
suppressed_configs list of the affected targets, which means that
clang:find_bad_constructs is now enabled for these translation units.
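
In practice, each affected target drops a block of roughly the following
shape (reconstructed from the diff below; the exact condition varies per
target between is_clang and !build_with_chromium && is_clang):

    if (!build_with_chromium && is_clang) {
      # Suppress warnings from the Chromium Clang plugin
      # (bugs.webrtc.org/163).
      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
    }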

Bug: webrtc:9251, webrtc:163
Change-Id: I6f03c46e772ccf4d15951a4b9d4e12015d539e58
Reviewed-on: https://webrtc-review.googlesource.com/90408
Reviewed-by: Fredrik Solenberg <solenberg@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24113}
Author:    Mirko Bonadei
Date:      2018-07-26 12:20:40 +02:00
Committed: Commit Bot
Parent:    a15fd0dee6
Commit:    17aff35e1d

21 changed files with 97 additions and 164 deletions

View File

@@ -421,6 +421,7 @@ if (rtc_include_tests) {
       "base/fakertp.h",
       "base/fakevideocapturer.cc",
       "base/fakevideocapturer.h",
+      "base/fakevideorenderer.cc",
       "base/fakevideorenderer.h",
       "base/testutils.cc",
       "base/testutils.h",

View File

@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/fakevideorenderer.h"
+
+namespace cricket {
+
+FakeVideoRenderer::FakeVideoRenderer() = default;
+
+void FakeVideoRenderer::OnFrame(const webrtc::VideoFrame& frame) {
+  rtc::CritScope cs(&crit_);
+  // TODO(zhurunz) Check with VP8 team to see if we can remove this
+  // tolerance on Y values. Some unit tests produce Y values close
+  // to 16 rather than close to zero, for supposedly black frames.
+  // Largest value observed is 34, e.g., running
+  // PeerConnectionIntegrationTest.SendAndReceive16To9AspectRatio.
+  black_frame_ = CheckFrameColorYuv(0, 48, 128, 128, 128, 128, &frame);
+  // Treat unexpected frame size as error.
+  ++num_rendered_frames_;
+  width_ = frame.width();
+  height_ = frame.height();
+  rotation_ = frame.rotation();
+  timestamp_us_ = frame.timestamp_us();
+}
+
+}  // namespace cricket

View File

@@ -21,30 +21,9 @@ namespace cricket {
 // Faked video renderer that has a callback for actions on rendering.
 class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
  public:
-  FakeVideoRenderer()
-      : errors_(0),
-        width_(0),
-        height_(0),
-        rotation_(webrtc::kVideoRotation_0),
-        timestamp_us_(0),
-        num_rendered_frames_(0),
-        black_frame_(false) {}
-  virtual void OnFrame(const webrtc::VideoFrame& frame) {
-    rtc::CritScope cs(&crit_);
-    // TODO(zhurunz) Check with VP8 team to see if we can remove this
-    // tolerance on Y values. Some unit tests produce Y values close
-    // to 16 rather than close to zero, for supposedly black frames.
-    // Largest value observed is 34, e.g., running
-    // PeerConnectionIntegrationTest.SendAndReceive16To9AspectRatio.
-    black_frame_ = CheckFrameColorYuv(0, 48, 128, 128, 128, 128, &frame);
-    // Treat unexpected frame size as error.
-    ++num_rendered_frames_;
-    width_ = frame.width();
-    height_ = frame.height();
-    rotation_ = frame.rotation();
-    timestamp_us_ = frame.timestamp_us();
-  }
+  FakeVideoRenderer();
+  void OnFrame(const webrtc::VideoFrame& frame) override;

   int errors() const { return errors_; }

   int width() const {
@@ -127,13 +106,13 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
     return true;
   }
-  int errors_;
-  int width_;
-  int height_;
-  webrtc::VideoRotation rotation_;
-  int64_t timestamp_us_;
-  int num_rendered_frames_;
-  bool black_frame_;
+  int errors_ = 0;
+  int width_ = 0;
+  int height_ = 0;
+  webrtc::VideoRotation rotation_ = webrtc::kVideoRotation_0;
+  int64_t timestamp_us_ = 0;
+  int num_rendered_frames_ = 0;
+  bool black_frame_ = false;
   rtc::CriticalSection crit_;
 };

View File

@@ -199,12 +199,6 @@ if (is_ios || is_mac) {
       "../rtc_base:rtc_base",
       "../rtc_base:rtc_base_approved",
     ]
-    if (is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
   }
 }
@@ -234,11 +228,6 @@ if (is_ios || is_mac) {
       "..:common_objc",
       ":used_from_extension",
     ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
   }

   rtc_static_library("videoframebuffer_objc") {
@@ -316,11 +305,6 @@ if (is_ios || is_mac) {
      "..:common_objc",
      ":used_from_extension",
    ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
   }

   rtc_static_library("ui_objc") {
@@ -442,12 +426,6 @@ if (is_ios || is_mac) {
     public_configs = [ ":common_config_objc" ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
     deps = [
       ":common_objc",
       ":video_objc",
@@ -519,12 +497,6 @@ if (is_ios || is_mac) {
       "objc/Framework/Headers/WebRTC/RTCVideoEncoderVP8.h",
     ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
     deps = [
       ":wrapped_native_codec_objc",
       "../modules/video_coding:webrtc_vp8",
@@ -541,12 +513,6 @@ if (is_ios || is_mac) {
       "objc/Framework/Headers/WebRTC/RTCVideoEncoderVP9.h",
     ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
     deps = [
       ":wrapped_native_codec_objc",
       "../modules/video_coding:webrtc_vp9",
@@ -575,12 +541,6 @@ if (is_ios || is_mac) {
     public_configs = [ ":common_config_objc" ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
     deps = [
       ":native_api",
       ":native_video",
@@ -596,11 +556,6 @@ if (is_ios || is_mac) {
       "objc/Framework/Classes/PeerConnection/RTCMediaConstraints.mm",
       "objc/Framework/Headers/WebRTC/RTCMediaConstraints.h",
     ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
     public_configs = [ ":common_config_objc" ]
     deps = [
@@ -771,12 +726,6 @@ if (is_ios || is_mac) {
     ]
     public_configs = [ ":common_config_objc" ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
     deps = [
       ":common_objc",
       ":mediaconstraints_objc",
@@ -879,12 +828,6 @@ if (is_ios || is_mac) {
     ]
     include_dirs += [ "$root_out_dir/WebRTC.framework/Headers/" ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
   }

   bundle_data("sdk_unittests_bundle_data") {
@@ -915,12 +858,6 @@ if (is_ios || is_mac) {
       ":sdk_unittests_sources",
     ]
     ldflags = [ "-all_load" ]
-    if (is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
   }
 }
@@ -986,12 +923,6 @@ if (is_ios || is_mac) {
       sources += [ "objc/Framework/UnitTests/RTCAudioSessionTest.mm" ]
       deps += [ ":audio_objc" ]
     }
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
   }
 }
@@ -1111,12 +1042,6 @@ if (is_ios || is_mac) {
     ]
     public_configs = [ ":common_config_objc" ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      configs -= [ "//build/config/clang:find_bad_constructs" ]
-    }
   }
 }
@@ -1131,12 +1056,6 @@ if (is_ios || is_mac) {
     configs += [ "..:common_objc" ]
     public_configs = [ ":common_config_objc" ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
     deps = [
       ":common_objc",
       ":videocodec_objc",
@@ -1171,12 +1090,6 @@ if (is_ios || is_mac) {
     public_configs = [ ":common_config_objc" ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
     deps = [
       ":native_video",
       ":videocapturebase_objc",
@@ -1254,11 +1167,6 @@ if (is_ios || is_mac) {
       "../rtc_base:checks",
       "../rtc_base:rtc_base_approved",
     ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
   }

   rtc_static_library("videotoolbox_objc") {
@@ -1309,12 +1217,6 @@ if (is_ios || is_mac) {
       "CoreVideo.framework",
       "VideoToolbox.framework",
     ]
-    if (!build_with_chromium && is_clang) {
-      # Suppress warnings from the Chromium Clang plugin
-      # (bugs.webrtc.org/163).
-      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
-    }
     }
   }
 }

View File

@@ -18,12 +18,12 @@ namespace webrtc {
 class MediaConstraints : public MediaConstraintsInterface {
  public:
-  virtual ~MediaConstraints();
+  ~MediaConstraints() override;
   MediaConstraints();
   MediaConstraints(const MediaConstraintsInterface::Constraints& mandatory,
                    const MediaConstraintsInterface::Constraints& optional);
-  virtual const Constraints& GetMandatory() const;
-  virtual const Constraints& GetOptional() const;
+  const Constraints& GetMandatory() const override;
+  const Constraints& GetOptional() const override;

  private:
  MediaConstraintsInterface::Constraints mandatory_;

View File

@@ -23,7 +23,7 @@ namespace webrtc {
 class PeerConnectionDelegateAdapter : public PeerConnectionObserver {
  public:
   PeerConnectionDelegateAdapter(RTCPeerConnection *peerConnection);
-  virtual ~PeerConnectionDelegateAdapter();
+  ~PeerConnectionDelegateAdapter() override;

   void OnSignalingChange(PeerConnectionInterface::SignalingState new_state) override;

View File

@@ -25,9 +25,7 @@ class StatsObserverAdapter : public StatsObserver {
     completion_handler_ = completionHandler;
   }
-  ~StatsObserverAdapter() {
-    completion_handler_ = nil;
-  }
+  ~StatsObserverAdapter() override { completion_handler_ = nil; }

   void OnComplete(const StatsReports& reports) override {
     RTC_DCHECK(completion_handler_);

View File

@@ -46,9 +46,7 @@ class CreateSessionDescriptionObserverAdapter
     completion_handler_ = completionHandler;
   }
-  ~CreateSessionDescriptionObserverAdapter() {
-    completion_handler_ = nil;
-  }
+  ~CreateSessionDescriptionObserverAdapter() override { completion_handler_ = nil; }

   void OnSuccess(SessionDescriptionInterface *desc) override {
     RTC_DCHECK(completion_handler_);
@@ -86,9 +84,7 @@ class SetSessionDescriptionObserverAdapter :
     completion_handler_ = completionHandler;
   }
-  ~SetSessionDescriptionObserverAdapter() {
-    completion_handler_ = nil;
-  }
+  ~SetSessionDescriptionObserverAdapter() override { completion_handler_ = nil; }

   void OnSuccess() override {
     RTC_DCHECK(completion_handler_);

View File

@@ -286,6 +286,8 @@ AnnexBBufferReader::AnnexBBufferReader(const uint8_t* annexb_buffer,
   offset_ = offsets_.begin();
 }
+
+AnnexBBufferReader::~AnnexBBufferReader() = default;

 bool AnnexBBufferReader::ReadNalu(const uint8_t** out_nalu,
                                   size_t* out_length) {
   RTC_DCHECK(out_nalu);

View File

@@ -57,7 +57,7 @@ CMVideoFormatDescriptionRef CreateVideoFormatDescription(
 class AnnexBBufferReader final {
  public:
   AnnexBBufferReader(const uint8_t* annexb_buffer, size_t length);
-  ~AnnexBBufferReader() {}
+  ~AnnexBBufferReader();
   AnnexBBufferReader(const AnnexBBufferReader& other) = delete;
   void operator=(const AnnexBBufferReader& other) = delete;

View File

@@ -49,7 +49,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
                        public rtc::MessageHandler {
  public:
   AudioDeviceIOS();
-  ~AudioDeviceIOS();
+  ~AudioDeviceIOS() override;

   void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
@@ -65,11 +65,11 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
   int32_t StartPlayout() override;
   int32_t StopPlayout() override;
-  bool Playing() const override { return playing_; }
+  bool Playing() const override;
   int32_t StartRecording() override;
   int32_t StopRecording() override;
-  bool Recording() const override { return recording_; }
+  bool Recording() const override;

   // These methods returns hard-coded delay values and not dynamic delay
   // estimates. The reason is that iOS supports a built-in AEC and the WebRTC

View File

@@ -271,6 +271,10 @@ int32_t AudioDeviceIOS::StopPlayout() {
   return 0;
 }
+
+bool AudioDeviceIOS::Playing() const {
+  return playing_;
+}

 int32_t AudioDeviceIOS::StartRecording() {
   LOGI() << "StartRecording";
   RTC_DCHECK_RUN_ON(&thread_checker_);
@@ -305,6 +309,10 @@ int32_t AudioDeviceIOS::StopRecording() {
   return 0;
 }
+
+bool AudioDeviceIOS::Recording() const {
+  return recording_;
+}

 int32_t AudioDeviceIOS::PlayoutDelay(uint16_t& delayMS) const {
   delayMS = kFixedPlayoutDelayEstimate;
   return 0;

View File

@@ -21,7 +21,7 @@ namespace webrtc {
 class ObjCVideoDecoderFactory : public VideoDecoderFactory {
  public:
   explicit ObjCVideoDecoderFactory(id<RTCVideoDecoderFactory>);
-  ~ObjCVideoDecoderFactory();
+  ~ObjCVideoDecoderFactory() override;

   id<RTCVideoDecoderFactory> wrapped_decoder_factory() const;

View File

@@ -22,7 +22,7 @@ namespace webrtc {
 class ObjCVideoEncoderFactory : public VideoEncoderFactory {
  public:
   explicit ObjCVideoEncoderFactory(id<RTCVideoEncoderFactory>);
-  ~ObjCVideoEncoderFactory();
+  ~ObjCVideoEncoderFactory() override;

   id<RTCVideoEncoderFactory> wrapped_encoder_factory() const;

View File

@@ -31,16 +31,16 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource {
   // This class can not be used for implementing screen casting. Hopefully, this
   // function will be removed before we add that to iOS/Mac.
-  bool is_screencast() const override { return false; }
+  bool is_screencast() const override;

   // Indicates that the encoder should denoise video before encoding it.
   // If it is not set, the default configuration is used which is different
   // depending on video codec.
-  absl::optional<bool> needs_denoising() const override { return false; }
-  SourceState state() const override { return SourceState::kLive; }
-  bool remote() const override { return false; }
+  absl::optional<bool> needs_denoising() const override;
+  SourceState state() const override;
+  bool remote() const override;

   void OnCapturedFrame(RTCVideoFrame* frame);

View File

@@ -39,6 +39,22 @@ ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) :
   adapter_.objCVideoTrackSource = this;
 }
+
+bool ObjCVideoTrackSource::is_screencast() const {
+  return false;
+}
+
+absl::optional<bool> ObjCVideoTrackSource::needs_denoising() const {
+  return false;
+}
+
+MediaSourceInterface::SourceState ObjCVideoTrackSource::state() const {
+  return SourceState::kLive;
+}
+
+bool ObjCVideoTrackSource::remote() const {
+  return false;
+}

 void ObjCVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps) {
   cricket::VideoFormat format(width, height, cricket::VideoFormat::FpsToInterval(fps), 0);
   video_adapter()->OnOutputFormatRequest(format);

View File

@@ -31,7 +31,7 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
  public:
   ObjCCallbackVideoSink(VideoSinkCallback callback) : callback_(callback) {}
-  virtual void OnFrame(const webrtc::VideoFrame &frame) {
+  void OnFrame(const webrtc::VideoFrame &frame) override {
     callback_(NativeToObjCVideoFrame(frame));
   }

View File

@@ -43,7 +43,7 @@ typedef int32_t(^RecordedDataIsAvailableBlock)(const void* audioSamples,
 class MockAudioTransport : public webrtc::AudioTransport {
 public:
   MockAudioTransport() {}
-  ~MockAudioTransport() {}
+  ~MockAudioTransport() override {}

   void expectNeedMorePlayData(NeedMorePlayDataBlock block) {
     needMorePlayDataBlock = block;
@@ -60,7 +60,7 @@ public:
                           void* audioSamples,
                           size_t& nSamplesOut,
                           int64_t* elapsed_time_ms,
-                          int64_t* ntp_time_ms) {
+                          int64_t* ntp_time_ms) override {
     return needMorePlayDataBlock(nSamples,
                                  nBytesPerSample,
                                  nChannels,
@@ -80,7 +80,7 @@ public:
                                  const int32_t clockDrift,
                                  const uint32_t currentMicLevel,
                                  const bool keyPressed,
-                                 uint32_t& newMicLevel) {
+                                 uint32_t& newMicLevel) override {
     return recordedDataIsAvailableBlock(audioSamples,
                                         nSamples,
                                         nBytesPerSample,
@@ -99,11 +99,9 @@ public:
                        size_t number_of_frames,
                        void* audio_data,
                        int64_t* elapsed_time_ms,
-                       int64_t* ntp_time_ms) {
-  }
- private:
+                       int64_t* ntp_time_ms) override {}
+private:
   NeedMorePlayDataBlock needMorePlayDataBlock;
   RecordedDataIsAvailableBlock recordedDataIsAvailableBlock;
 };

View File

@@ -299,7 +299,7 @@ namespace webrtc {
 class AudioSessionTest : public ::testing::Test {
  protected:
-  void TearDown() {
+  void TearDown() override {
     RTCAudioSession *session = [RTCAudioSession sharedInstance];
     for (id<RTCAudioSessionDelegate> delegate : session.delegates) {
       [session removeDelegate:delegate];

View File

@@ -56,14 +56,14 @@ std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(id<RTCVideoDecoderFactory>
 TEST(ObjCVideoDecoderFactoryTest, InitDecodeReturnsOKOnSuccess) {
   std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
-  auto settings = new webrtc::VideoCodec();
+  auto* settings = new webrtc::VideoCodec();
   EXPECT_EQ(decoder->InitDecode(settings, 1), WEBRTC_VIDEO_CODEC_OK);
 }

 TEST(ObjCVideoDecoderFactoryTest, InitDecodeReturnsErrorOnFail) {
   std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
-  auto settings = new webrtc::VideoCodec();
+  auto* settings = new webrtc::VideoCodec();
   EXPECT_EQ(decoder->InitDecode(settings, 1), WEBRTC_VIDEO_CODEC_ERROR);
 }

View File

@@ -58,14 +58,14 @@ std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(id<RTCVideoEncoderFactory>
 TEST(ObjCVideoEncoderFactoryTest, InitEncodeReturnsOKOnSuccess) {
   std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
-  auto settings = new webrtc::VideoCodec();
+  auto* settings = new webrtc::VideoCodec();
   EXPECT_EQ(encoder->InitEncode(settings, 1, 0), WEBRTC_VIDEO_CODEC_OK);
 }

 TEST(ObjCVideoEncoderFactoryTest, InitEncodeReturnsErrorOnFail) {
   std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
-  auto settings = new webrtc::VideoCodec();
+  auto* settings = new webrtc::VideoCodec();
   EXPECT_EQ(encoder->InitEncode(settings, 1, 0), WEBRTC_VIDEO_CODEC_ERROR);
 }