Fixing some of the issues found by clang static analyzer.

Bug: webrtc:8737
Change-Id: Ib436449c493336e7c35a72a96dc88cccdbb5bbaf
Reviewed-on: https://webrtc-review.googlesource.com/39200
Commit-Queue: Peter Hanspers <peterhanspers@webrtc.org>
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21607}
Peter Hanspers authored on 2018-01-12 16:16:18 +01:00; committed by Commit Bot
parent 55536f27ee
commit d9b64cdd32
22 changed files with 35 additions and 37 deletions
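Two patterns account for most of the fixes below: initializers that test the result of [super init] (or of another initializer) without assigning it to self, and methods or properties that can return nil but were not annotated as nullable. A minimal sketch of the corrected initializer idiom, with a placeholder class name:

#import <Foundation/Foundation.h>

// Illustrative sketch only; MyController is a placeholder class, not one of
// the files touched by this commit.
@interface MyController : NSObject
- (instancetype)initWithCapturer:(id)capturer;
@end

@implementation MyController {
  id _capturer;
}

- (instancetype)initWithCapturer:(id)capturer {
  // Assign the result of [super init] to self before testing it. Writing
  // "if ([super init])" or "if (self == [super init])" leaves self unset,
  // which is the pattern the analyzer flags and this commit corrects.
  if (self = [super init]) {
    _capturer = capturer;
  }
  return self;
}

@end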

View File

@@ -58,7 +58,6 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
__weak ARDAppEngineClient *weakSelf = self;
[NSURLConnection sendAsyncRequest:request
completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) {
-ARDAppEngineClient *strongSelf = weakSelf;
if (error) {
if (completionHandler) {
completionHandler(nil, error);
@@ -102,7 +101,6 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
completionHandler:^(NSURLResponse *response,
NSData *data,
NSError *error) {
-ARDAppEngineClient *strongSelf = weakSelf;
if (error) {
if (completionHandler) {
completionHandler(nil, error);

View File

@@ -21,7 +21,7 @@
- (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer
settings:(ARDSettingsModel *)settings {
-if ([super init]) {
+if (self = [super init]) {
_capturer = capturer;
_settings = settings;
_usingFrontCamera = YES;

View File

@@ -168,8 +168,6 @@ NS_ASSUME_NONNULL_BEGIN
}
- (void)registerStoreDefaults {
-NSString *defaultVideoResolutionSetting = [self defaultVideoResolutionSetting];
NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]];
[ARDSettingsStore setDefaultsForVideoResolution:[self defaultVideoResolutionSetting]
videoCodec:codecData

View File

@@ -42,7 +42,7 @@ using namespace webrtc::videocapturemodule;
@synthesize frameRotation = _framRotation;
- (id)initWithOwner:(VideoCaptureIos*)owner {
-if (self == [super init]) {
+if (self = [super init]) {
_owner = owner;
_captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)

View File

@@ -124,7 +124,7 @@ const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
_logSink.reset();
}
-- (NSData *)logData {
+- (nullable NSData *)logData {
if (_hasStarted) {
return nil;
}

View File

@@ -31,7 +31,7 @@
std::string nativeId = [NSString stdStringForString:trackId];
rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource);
-if ([self initWithNativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
+if (self = [self initWithNativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
_source = source;
}
return self;

View File

@@ -62,8 +62,7 @@ NS_ASSUME_NONNULL_BEGIN
* RTCConfiguration struct representation of this RTCConfiguration. This is
* needed to pass to the underlying C++ APIs.
*/
-- (webrtc::PeerConnectionInterface::RTCConfiguration *)
-createNativeConfiguration;
+- (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration;
- (instancetype)initWithNativeConfiguration:
(const webrtc::PeerConnectionInterface::RTCConfiguration &)config NS_DESIGNATED_INITIALIZER;

View File

@@ -48,7 +48,7 @@
_frameType = static_cast<RTCFrameType>(encodedImage._frameType);
_rotation = static_cast<RTCVideoRotation>(encodedImage.rotation_);
_completeFrame = encodedImage._completeFrame;
-_qp = encodedImage.qp_ == -1 ? nil : @(encodedImage.qp_);
+_qp = @(encodedImage.qp_);
_contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ?
RTCVideoContentTypeScreenshare :
RTCVideoContentTypeUnspecified;

View File

@@ -139,6 +139,7 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
}
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
+CFRelease(sampleBuffer);
[self readNextBuffer];
return;
}

View File

@@ -16,9 +16,8 @@
@implementation RTCPeerConnection (DataChannel)
-- (RTCDataChannel *)dataChannelForLabel:(NSString *)label
-configuration:
-(RTCDataChannelConfiguration *)configuration {
+- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label
+configuration:(RTCDataChannelConfiguration *)configuration {
std::string labelString = [NSString stdStringForString:label];
const webrtc::DataChannelInit nativeInit =
configuration.nativeDataChannelInit;

View File

@@ -58,7 +58,7 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
}
}
-- (RTCMediaStreamTrack *)track {
+- (nullable RTCMediaStreamTrack *)track {
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
_nativeRtpReceiver->track());
if (nativeTrack) {

View File

@@ -21,8 +21,7 @@ NS_ASSUME_NONNULL_BEGIN
* RTCSessionDescription object. This is needed to pass to the underlying C++
* APIs.
*/
-@property(nonatomic, readonly)
-webrtc::SessionDescriptionInterface *nativeDescription;
+@property(nonatomic, readonly, nullable) webrtc::SessionDescriptionInterface *nativeDescription;
/**
* Initialize an RTCSessionDescription from a native

View File

@@ -32,7 +32,7 @@
rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
factory.nativeFactory->CreateVideoTrack(nativeId,
source.nativeVideoSource);
-if ([self initWithNativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
+if (self = [self initWithNativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
_source = source;
}
return self;

View File

@@ -49,7 +49,7 @@
- (NSInteger)decode:(RTCEncodedImage *)encodedImage
missingFrames:(BOOL)missingFrames
fragmentationHeader:(RTCRtpFragmentationHeader *)fragmentationHeader
-codecSpecificInfo:(__nullable id<RTCCodecSpecificInfo>)info
+codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
renderTimeMs:(int64_t)renderTimeMs {
RTC_NOTREACHED();
return 0;

View File

@@ -47,7 +47,7 @@
}
- (NSInteger)encode:(RTCVideoFrame *)frame
-codecSpecificInfo:(id<RTCCodecSpecificInfo>)info
+codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
frameTypes:(NSArray<NSNumber *> *)frameTypes {
RTC_NOTREACHED();
return 0;

View File

@@ -22,6 +22,7 @@
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#import "helpers.h"
#import "scoped_cftyperef.h"
#if defined(WEBRTC_IOS)
#import "Common/RTCUIApplicationStatusObserver.h"
@@ -99,7 +100,7 @@ void decompressionOutputCallback(void *decoderRef,
- (NSInteger)decode:(RTCEncodedImage *)inputImage
missingFrames:(BOOL)missingFrames
fragmentationHeader:(RTCRtpFragmentationHeader *)fragmentationHeader
-codecSpecificInfo:(__nullable id<RTCCodecSpecificInfo>)info
+codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
renderTimeMs:(int64_t)renderTimeMs {
RTC_DCHECK(inputImage.buffer);
@@ -119,19 +120,22 @@ void decompressionOutputCallback(void *decoderRef,
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}
#endif
-CMVideoFormatDescriptionRef inputFormat = nullptr;
if (webrtc::H264AnnexBBufferHasVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
inputImage.buffer.length)) {
-inputFormat = webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
-inputImage.buffer.length);
+rtc::ScopedCFTypeRef<CMVideoFormatDescriptionRef> inputFormat =
+rtc::ScopedCF(webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
+inputImage.buffer.length));
if (inputFormat) {
// Check if the video format has changed, and reinitialize decoder if
// needed.
-if (!CMFormatDescriptionEqual(inputFormat, _videoFormat)) {
-[self setVideoFormat:inputFormat];
-[self resetDecompressionSession];
+if (!CMFormatDescriptionEqual(inputFormat.get(), _videoFormat)) {
+[self setVideoFormat:inputFormat.get()];
+int resetDecompressionSessionError = [self resetDecompressionSession];
+if (resetDecompressionSessionError != WEBRTC_VIDEO_CODEC_OK) {
+return resetDecompressionSessionError;
+}
}
-CFRelease(inputFormat);
}
}
if (!_videoFormat) {
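The switch to rtc::ScopedCFTypeRef above also removes the manual CFRelease(inputFormat): the wrapper releases the CoreFoundation object when it leaves scope, including on the new early-return path. A simplified sketch of that ownership idiom, using a plain CFStringRef and assuming the ScopedCF() helper is generic over CF types, as its use above suggests:

#import <CoreFoundation/CoreFoundation.h>
#import "scoped_cftyperef.h"  // same header the decoder now imports

// Sketch only, not decoder code: adopt ownership of an object returned by a
// CF "Create" call and let the wrapper handle the release.
static void ScopedCFSketch() {
  rtc::ScopedCFTypeRef<CFStringRef> name =
      rtc::ScopedCF(CFStringCreateWithCString(nullptr, "H264", kCFStringEncodingUTF8));
  if (name) {
    CFShow(name.get());  // pass the raw reference to C APIs via get()
  }
}  // CFRelease runs here automatically, even if the function returned early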

View File

@@ -340,7 +340,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
}
- (NSInteger)encode:(RTCVideoFrame *)frame
-codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
+codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)codecSpecificInfo
frameTypes:(NSArray<NSNumber *> *)frameTypes {
RTC_DCHECK_EQ(frame.width, _width);
RTC_DCHECK_EQ(frame.height, _height);

View File

@@ -69,7 +69,7 @@ RTC_EXPORT
// Returns the current contents of the logs, or nil if start has been called
// without a stop.
-- (NSData *)logData;
+- (nullable NSData *)logData;
@end
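With the return type marked nullable, callers have to handle a nil result, which logData produces while logging is still running. A small usage sketch; the start/stop methods are assumed from the implementation shown earlier, and the output path is only an example:

RTCFileLogger *fileLogger = [[RTCFileLogger alloc] init];
[fileLogger start];
// ... exercise the app so something gets logged ...
[fileLogger stop];
NSData *logs = [fileLogger logData];  // nil if stop had not been called
if (logs) {
  [logs writeToFile:@"/tmp/webrtc.log" atomically:YES];
}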

View File

@@ -212,8 +212,8 @@ RTC_EXPORT
@interface RTCPeerConnection (DataChannel)
/** Create a new data channel with the given label and configuration. */
-- (RTCDataChannel *)dataChannelForLabel:(NSString *)label
-configuration:(RTCDataChannelConfiguration *)configuration;
+- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label
+configuration:(RTCDataChannelConfiguration *)configuration;
@end
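Because the method can now legitimately return nil, callers should check the result instead of assuming a channel was created. A usage sketch; the peerConnection object and its setup are assumed, not shown here:

RTCDataChannelConfiguration *config = [[RTCDataChannelConfiguration alloc] init];
RTCDataChannel *channel = [peerConnection dataChannelForLabel:@"chat"
                                                configuration:config];
if (!channel) {
  NSLog(@"Failed to create data channel");
}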

View File

@@ -65,7 +65,7 @@ RTC_EXPORT
* RTCMediaStreamTrack. Use isEqual: instead of == to compare
* RTCMediaStreamTrack instances.
*/
-@property(nonatomic, readonly) RTCMediaStreamTrack *track;
+@property(nonatomic, readonly, nullable) RTCMediaStreamTrack *track;
/** The delegate for this RtpReceiver. */
@property(nonatomic, weak) id<RTCRtpReceiverDelegate> delegate;
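The track property can now be nil, so callers should check it before use; per the comment above, instances are compared with isEqual: rather than ==. A short sketch, where receiver and localTrack are assumed variables:

RTCMediaStreamTrack *track = receiver.track;
if (track && [track isEqual:localTrack]) {
  // same underlying track
}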

View File

@@ -148,7 +148,7 @@ RTC_EXPORT
numberOfCores:(int)numberOfCores;
- (NSInteger)releaseEncoder;
- (NSInteger)encode:(RTCVideoFrame *)frame
-codecSpecificInfo:(id<RTCCodecSpecificInfo>)info
+codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
frameTypes:(NSArray<NSNumber *> *)frameTypes;
- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate;
- (NSString *)implementationName;
@@ -171,7 +171,7 @@ RTC_EXPORT
- (NSInteger)decode:(RTCEncodedImage *)encodedImage
missingFrames:(BOOL)missingFrames
fragmentationHeader:(RTCRtpFragmentationHeader *)fragmentationHeader
-codecSpecificInfo:(__nullable id<RTCCodecSpecificInfo>)info
+codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
renderTimeMs:(int64_t)renderTimeMs;
- (NSString *)implementationName;

View File

@@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN
RTC_EXPORT
@protocol RTCVideoEncoderFactory <NSObject>
-- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info;
+- (nullable id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info;
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs; // TODO(andersc): "supportedFormats" instead?
@end
@@ -28,7 +28,7 @@ RTC_EXPORT
RTC_EXPORT
@protocol RTCVideoDecoderFactory <NSObject>
-- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info;
+- (nullable id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info;
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs; // TODO(andersc): "supportedFormats" instead?
@end
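With createEncoder:/createDecoder: marked nullable, a factory may return nil for a format it does not support, and callers must tolerate that. A rough sketch of a conforming encoder factory method; the name property and the RTCVideoEncoderH264 initializer are assumptions, not taken from this diff:

- (nullable id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
  if ([info.name isEqualToString:@"H264"]) {
    return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
  }
  return nil;  // unsupported format; callers now have to handle this
}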