Fixing some of the issues found by clang static analyzer.

Bug: webrtc:8737
Change-Id: Ib436449c493336e7c35a72a96dc88cccdbb5bbaf
Reviewed-on: https://webrtc-review.googlesource.com/39200
Commit-Queue: Peter Hanspers <peterhanspers@webrtc.org>
Reviewed-by: Henrik Andersson <henrika@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21607}
Committed by: Commit Bot
Parent: 55536f27ee
Commit: d9b64cdd32
@@ -58,7 +58,6 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
   __weak ARDAppEngineClient *weakSelf = self;
   [NSURLConnection sendAsyncRequest:request
                   completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) {
-                    ARDAppEngineClient *strongSelf = weakSelf;
                     if (error) {
                       if (completionHandler) {
                         completionHandler(nil, error);
@@ -102,7 +101,6 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
      completionHandler:^(NSURLResponse *response,
                          NSData *data,
                          NSError *error) {
-       ARDAppEngineClient *strongSelf = weakSelf;
        if (error) {
          if (completionHandler) {
            completionHandler(nil, error);
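Note: the line deleted in the two hunks above is a dead store — strongSelf is assigned from weakSelf but never read inside the block, which is exactly the kind of issue the clang static analyzer reports. A minimal sketch of when the weak/strong pattern is worth keeping (processResponse:data:error: is a hypothetical helper, not part of this CL):

    __weak ARDAppEngineClient *weakSelf = self;
    [NSURLConnection sendAsyncRequest:request
                    completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) {
                      ARDAppEngineClient *strongSelf = weakSelf;
                      if (!strongSelf) {
                        return;  // owner was deallocated while the request was in flight
                      }
                      // Only when the block actually dereferences self is the dance needed:
                      [strongSelf processResponse:response data:data error:error];
                    }];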
@@ -21,7 +21,7 @@
 
 - (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer
                         settings:(ARDSettingsModel *)settings {
-  if ([super init]) {
+  if (self = [super init]) {
     _capturer = capturer;
     _settings = settings;
     _usingFrontCamera = YES;
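Note: the one-character fixes in this CL's initializers matter: if ([super init]) calls the superclass initializer but never assigns the result to self, and if (self == [super init]) in a later hunk compares instead of assigning. The conventional Objective-C shape, sketched as this initializer presumably reads after the change (closing lines assumed, not shown in the hunk):

    - (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer
                            settings:(ARDSettingsModel *)settings {
      // Assign [super init] back to self: the superclass may return a different
      // object or nil, and the ivars below must be set on whatever it returned.
      if (self = [super init]) {
        _capturer = capturer;
        _settings = settings;
        _usingFrontCamera = YES;
      }
      return self;
    }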
@@ -168,8 +168,6 @@ NS_ASSUME_NONNULL_BEGIN
 }
 
 - (void)registerStoreDefaults {
-  NSString *defaultVideoResolutionSetting = [self defaultVideoResolutionSetting];
-
   NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]];
   [ARDSettingsStore setDefaultsForVideoResolution:[self defaultVideoResolutionSetting]
                                        videoCodec:codecData
@@ -42,7 +42,7 @@ using namespace webrtc::videocapturemodule;
 @synthesize frameRotation = _framRotation;
 
 - (id)initWithOwner:(VideoCaptureIos*)owner {
-  if (self == [super init]) {
+  if (self = [super init]) {
     _owner = owner;
     _captureSession = [[AVCaptureSession alloc] init];
 #if defined(WEBRTC_IOS)
@@ -124,7 +124,7 @@ const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
   _logSink.reset();
 }
 
-- (NSData *)logData {
+- (nullable NSData *)logData {
   if (_hasStarted) {
     return nil;
   }
@@ -31,7 +31,7 @@
   std::string nativeId = [NSString stdStringForString:trackId];
   rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
       factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource);
-  if ([self initWithNativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
+  if (self = [self initWithNativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
     _source = source;
   }
   return self;
@@ -62,8 +62,7 @@ NS_ASSUME_NONNULL_BEGIN
  * RTCConfiguration struct representation of this RTCConfiguration. This is
  * needed to pass to the underlying C++ APIs.
  */
-- (webrtc::PeerConnectionInterface::RTCConfiguration *)
-    createNativeConfiguration;
+- (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration;
 
 - (instancetype)initWithNativeConfiguration:
     (const webrtc::PeerConnectionInterface::RTCConfiguration &)config NS_DESIGNATED_INITIALIZER;
@@ -48,7 +48,7 @@
     _frameType = static_cast<RTCFrameType>(encodedImage._frameType);
     _rotation = static_cast<RTCVideoRotation>(encodedImage.rotation_);
     _completeFrame = encodedImage._completeFrame;
-    _qp = encodedImage.qp_ == -1 ? nil : @(encodedImage.qp_);
+    _qp = @(encodedImage.qp_);
     _contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ?
         RTCVideoContentTypeScreenshare :
         RTCVideoContentTypeUnspecified;
@@ -139,6 +139,7 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
   }
   if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
       !CMSampleBufferDataIsReady(sampleBuffer)) {
+    CFRelease(sampleBuffer);
     [self readNextBuffer];
     return;
   }
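Note: a sample buffer obtained from a copy/create-style API follows the Core Foundation create rule, so every exit path owns a release; the added CFRelease plugs the leak on the early-return path the analyzer found. A sketch of the pattern, assuming the buffer comes from an AVAssetReaderTrackOutput (trackOutput is illustrative, not taken from this file):

    CMSampleBufferRef sampleBuffer = [trackOutput copyNextSampleBuffer];  // +1 reference
    if (!sampleBuffer) {
      return;
    }
    if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
        !CMSampleBufferDataIsReady(sampleBuffer)) {
      CFRelease(sampleBuffer);  // we own it, so release before bailing out
      [self readNextBuffer];
      return;
    }
    // The normal path releases the buffer once it has been consumed.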
@@ -16,9 +16,8 @@
 
 @implementation RTCPeerConnection (DataChannel)
 
-- (RTCDataChannel *)dataChannelForLabel:(NSString *)label
-                          configuration:
-                              (RTCDataChannelConfiguration *)configuration {
+- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label
+                                   configuration:(RTCDataChannelConfiguration *)configuration {
   std::string labelString = [NSString stdStringForString:label];
   const webrtc::DataChannelInit nativeInit =
       configuration.nativeDataChannelInit;
@@ -58,7 +58,7 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
   }
 }
 
-- (RTCMediaStreamTrack *)track {
+- (nullable RTCMediaStreamTrack *)track {
   rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
       _nativeRtpReceiver->track());
   if (nativeTrack) {
@@ -21,8 +21,7 @@ NS_ASSUME_NONNULL_BEGIN
  * RTCSessionDescription object. This is needed to pass to the underlying C++
  * APIs.
  */
-@property(nonatomic, readonly)
-    webrtc::SessionDescriptionInterface *nativeDescription;
+@property(nonatomic, readonly, nullable) webrtc::SessionDescriptionInterface *nativeDescription;
 
 /**
  * Initialize an RTCSessionDescription from a native
@@ -32,7 +32,7 @@
   rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
       factory.nativeFactory->CreateVideoTrack(nativeId,
                                               source.nativeVideoSource);
-  if ([self initWithNativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
+  if (self = [self initWithNativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
     _source = source;
   }
   return self;
@@ -49,7 +49,7 @@
 - (NSInteger)decode:(RTCEncodedImage *)encodedImage
       missingFrames:(BOOL)missingFrames
 fragmentationHeader:(RTCRtpFragmentationHeader *)fragmentationHeader
-  codecSpecificInfo:(__nullable id<RTCCodecSpecificInfo>)info
+  codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
       renderTimeMs:(int64_t)renderTimeMs {
   RTC_NOTREACHED();
   return 0;
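Note: most of the remaining hunks only touch nullability: the old double-underscore __nullable spelling becomes the audited nullable keyword, and methods or properties that can legitimately return nil gain an explicit nullable so the analyzer (and the Swift importer) stop assuming nonnull. Inside an NS_ASSUME_NONNULL_BEGIN/END region everything else defaults to nonnull, so only the nil-returning cases need marking. A small header sketch (RTCExampleLogger is a hypothetical class, not a file in this CL):

    NS_ASSUME_NONNULL_BEGIN

    @interface RTCExampleLogger : NSObject

    // May return nil (e.g. while logging is still running), hence the annotation.
    - (nullable NSData *)logData;

    // Implicitly nonnull because of the surrounding NS_ASSUME_NONNULL region.
    - (NSString *)implementationName;

    @end

    NS_ASSUME_NONNULL_END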
@@ -47,7 +47,7 @@
 }
 
 - (NSInteger)encode:(RTCVideoFrame *)frame
-  codecSpecificInfo:(id<RTCCodecSpecificInfo>)info
+  codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
          frameTypes:(NSArray<NSNumber *> *)frameTypes {
   RTC_NOTREACHED();
   return 0;
@@ -22,6 +22,7 @@
 #import "WebRTC/RTCVideoFrame.h"
 #import "WebRTC/RTCVideoFrameBuffer.h"
 #import "helpers.h"
+#import "scoped_cftyperef.h"
 
 #if defined(WEBRTC_IOS)
 #import "Common/RTCUIApplicationStatusObserver.h"
@@ -99,7 +100,7 @@ void decompressionOutputCallback(void *decoderRef,
 - (NSInteger)decode:(RTCEncodedImage *)inputImage
       missingFrames:(BOOL)missingFrames
 fragmentationHeader:(RTCRtpFragmentationHeader *)fragmentationHeader
-  codecSpecificInfo:(__nullable id<RTCCodecSpecificInfo>)info
+  codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
       renderTimeMs:(int64_t)renderTimeMs {
   RTC_DCHECK(inputImage.buffer);
 
@@ -119,19 +120,22 @@ void decompressionOutputCallback(void *decoderRef,
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
   }
 #endif
-  CMVideoFormatDescriptionRef inputFormat = nullptr;
   if (webrtc::H264AnnexBBufferHasVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
                                                         inputImage.buffer.length)) {
-    inputFormat = webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
-                                                       inputImage.buffer.length);
+    rtc::ScopedCFTypeRef<CMVideoFormatDescriptionRef> inputFormat =
+        rtc::ScopedCF(webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
+                                                           inputImage.buffer.length));
     if (inputFormat) {
       // Check if the video format has changed, and reinitialize decoder if
       // needed.
-      if (!CMFormatDescriptionEqual(inputFormat, _videoFormat)) {
-        [self setVideoFormat:inputFormat];
-        [self resetDecompressionSession];
+      if (!CMFormatDescriptionEqual(inputFormat.get(), _videoFormat)) {
+        [self setVideoFormat:inputFormat.get()];
+        int resetDecompressionSessionError = [self resetDecompressionSession];
+        if (resetDecompressionSessionError != WEBRTC_VIDEO_CODEC_OK) {
+          return resetDecompressionSessionError;
+        }
       }
-      CFRelease(inputFormat);
     }
   }
   if (!_videoFormat) {
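Note: wrapping the CMVideoFormatDescriptionRef in rtc::ScopedCFTypeRef (hence the new scoped_cftyperef.h import earlier in this CL) ties the CFRelease to scope exit, so the +1 reference can no longer leak now that the reinitialization path may return early; checking the result of resetDecompressionSession also keeps the decoder from continuing with a broken session. A minimal sketch of the idea, where bytes and length stand in for the encoded buffer pointer and size:

    // A create-rule (+1) CF reference is released automatically when the wrapper
    // goes out of scope, on every path including the early returns below.
    rtc::ScopedCFTypeRef<CMVideoFormatDescriptionRef> inputFormat =
        rtc::ScopedCF(webrtc::CreateVideoFormatDescription(bytes, length));
    if (!inputFormat) {
      return WEBRTC_VIDEO_CODEC_ERROR;  // nothing was created, nothing to release
    }
    [self setVideoFormat:inputFormat.get()];  // pass the raw ref to APIs that need it
    int resetError = [self resetDecompressionSession];
    if (resetError != WEBRTC_VIDEO_CODEC_OK) {
      return resetError;  // inputFormat is released here as the scope unwinds
    }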
@@ -340,7 +340,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
 }
 
 - (NSInteger)encode:(RTCVideoFrame *)frame
-  codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
+  codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)codecSpecificInfo
          frameTypes:(NSArray<NSNumber *> *)frameTypes {
   RTC_DCHECK_EQ(frame.width, _width);
   RTC_DCHECK_EQ(frame.height, _height);
@@ -69,7 +69,7 @@ RTC_EXPORT
 
 // Returns the current contents of the logs, or nil if start has been called
 // without a stop.
-- (NSData *)logData;
+- (nullable NSData *)logData;
 
 @end
 
@@ -212,8 +212,8 @@ RTC_EXPORT
 @interface RTCPeerConnection (DataChannel)
 
 /** Create a new data channel with the given label and configuration. */
-- (RTCDataChannel *)dataChannelForLabel:(NSString *)label
-                          configuration:(RTCDataChannelConfiguration *)configuration;
+- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label
+                                   configuration:(RTCDataChannelConfiguration *)configuration;
 
 @end
 
@@ -65,7 +65,7 @@ RTC_EXPORT
  * RTCMediaStreamTrack. Use isEqual: instead of == to compare
  * RTCMediaStreamTrack instances.
  */
-@property(nonatomic, readonly) RTCMediaStreamTrack *track;
+@property(nonatomic, readonly, nullable) RTCMediaStreamTrack *track;
 
 /** The delegate for this RtpReceiver. */
 @property(nonatomic, weak) id<RTCRtpReceiverDelegate> delegate;
@@ -148,7 +148,7 @@ RTC_EXPORT
       numberOfCores:(int)numberOfCores;
 - (NSInteger)releaseEncoder;
 - (NSInteger)encode:(RTCVideoFrame *)frame
-  codecSpecificInfo:(id<RTCCodecSpecificInfo>)info
+  codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
          frameTypes:(NSArray<NSNumber *> *)frameTypes;
 - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate;
 - (NSString *)implementationName;
@@ -171,7 +171,7 @@ RTC_EXPORT
 - (NSInteger)decode:(RTCEncodedImage *)encodedImage
       missingFrames:(BOOL)missingFrames
 fragmentationHeader:(RTCRtpFragmentationHeader *)fragmentationHeader
-  codecSpecificInfo:(__nullable id<RTCCodecSpecificInfo>)info
+  codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
       renderTimeMs:(int64_t)renderTimeMs;
 - (NSString *)implementationName;
 
@@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN
 RTC_EXPORT
 @protocol RTCVideoEncoderFactory <NSObject>
 
-- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info;
+- (nullable id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info;
 - (NSArray<RTCVideoCodecInfo *> *)supportedCodecs; // TODO(andersc): "supportedFormats" instead?
 
 @end
@@ -28,7 +28,7 @@ RTC_EXPORT
 RTC_EXPORT
 @protocol RTCVideoDecoderFactory <NSObject>
 
-- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info;
+- (nullable id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info;
 - (NSArray<RTCVideoCodecInfo *> *)supportedCodecs; // TODO(andersc): "supportedFormats" instead?
 
 @end