Update output pixel format when starting video capture on iOS.

Update the output pixel format to the client-supplied format when
starting camera capture.

Also add a new API method that returns the preferred output pixel
format, based on AVCaptureVideoDataOutput's
availableVideoCVPixelFormatTypes property, and use it in AppRTCMobile.

Bug: webrtc:8505
Change-Id: Ia24eaf91d70d0703a34d38b06bb6eea28fb922b8
Reviewed-on: https://webrtc-review.googlesource.com/22680
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Zeke Chin <tkchin@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20697}
Author:    Anders Carlsson
Date:      2017-11-15 15:59:50 +01:00
Committer: Commit Bot
Parent:    4e3832124f
Commit:    7dad255ce1

3 changed files with 31 additions and 0 deletions
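Illustrative usage (not part of this commit): a minimal sketch of how client code, such as the AppRTCMobile capture controller changed in the first hunk below, can use the new preferredOutputPixelFormat method as a tiebreaker when picking a capture format. The helper name SelectFormat and its parameters are assumptions for illustration, not WebRTC API.

#import <AVFoundation/AVFoundation.h>
#import <WebRTC/RTCCameraVideoCapturer.h>
#include <limits.h>

// Hypothetical helper: pick the supported format closest to the target size,
// preferring the capturer's most efficient output pixel format on a tie.
static AVCaptureDeviceFormat *SelectFormat(RTCCameraVideoCapturer *capturer,
                                           AVCaptureDevice *device,
                                           int targetWidth,
                                           int targetHeight) {
  AVCaptureDeviceFormat *selectedFormat = nil;
  int currentDiff = INT_MAX;
  for (AVCaptureDeviceFormat *format in
       [RTCCameraVideoCapturer supportedFormatsForDevice:device]) {
    CMVideoDimensions dimension =
        CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    FourCharCode pixelFormat =
        CMFormatDescriptionGetMediaSubType(format.formatDescription);
    int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
    if (diff < currentDiff) {
      selectedFormat = format;
      currentDiff = diff;
    } else if (diff == currentDiff &&
               pixelFormat == [capturer preferredOutputPixelFormat]) {
      // Same distance from the target resolution; prefer the pixel format the
      // capturer can output most efficiently.
      selectedFormat = format;
    }
  }
  return selectedFormat;
}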


@@ -79,10 +79,13 @@
   for (AVCaptureDeviceFormat *format in formats) {
     CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
     int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
     if (diff < currentDiff) {
       selectedFormat = format;
       currentDiff = diff;
+    } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
+      selectedFormat = format;
     }
   }


@@ -31,6 +31,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
   AVCaptureVideoDataOutput *_videoDataOutput;
   AVCaptureSession *_captureSession;
   AVCaptureDevice *_currentDevice;
+  FourCharCode _preferredOutputPixelFormat;
+  FourCharCode _outputPixelFormat;
   BOOL _hasRetriedOnFatalError;
   BOOL _isRunning;
   // Will the session be running once all asynchronous operations have been completed?
@@ -105,6 +107,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
   return device.formats;
 }
 
+- (FourCharCode)preferredOutputPixelFormat {
+  return _preferredOutputPixelFormat;
+}
+
 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps {
@@ -129,6 +135,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
       [self reconfigureCaptureSessionInput];
       [self updateOrientation];
       [self updateDeviceCaptureFormat:format fps:fps];
+      [self updateVideoDataOutputPixelFormat:format];
       [_captureSession startRunning];
       [_currentDevice unlockForConfiguration];
       _isRunning = YES;
@@ -385,12 +392,27 @@ const int64_t kNanosecondsPerSecond = 1000000000;
   NSNumber *pixelFormat = availablePixelFormats.firstObject;
   NSAssert(pixelFormat, @"Output device has no supported formats.");
+  _preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
+  _outputPixelFormat = _preferredOutputPixelFormat;
   videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
   videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
   [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
   _videoDataOutput = videoDataOutput;
 }
 
+- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
+  FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+  if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) {
+    mediaSubType = _preferredOutputPixelFormat;
+  }
+
+  if (mediaSubType != _outputPixelFormat) {
+    _outputPixelFormat = mediaSubType;
+    _videoDataOutput.videoSettings =
+        @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(mediaSubType) };
+  }
+}
+
 #pragma mark - Private, called inside capture queue
 
 - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {


@@ -29,7 +29,13 @@ RTC_EXPORT
 // Returns list of formats that are supported by this class for this device.
 + (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device;
 
+// Returns the most efficient supported output pixel format for this capturer.
+- (FourCharCode)preferredOutputPixelFormat;
+
 // Starts and stops the capture session asynchronously.
+// The device will capture video in the format given in the `format` parameter. If the pixel format
+// in `format` is supported by the WebRTC pipeline, the same pixel format will be used for the
+// output. Otherwise, the format returned by `preferredOutputPixelFormat` will be used.
 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps;
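
Illustrative usage (not part of this commit): starting capture with a format chosen by the assumed SelectFormat helper sketched above; the capturer and device variables and the fps value are placeholders, and the fps is assumed to be supported by the chosen format.

// Frames are delivered in selectedFormat's pixel type when the WebRTC pipeline
// supports it (see +[RTCCVPixelBuffer supportedPixelFormats]); otherwise the
// capturer falls back to its preferredOutputPixelFormat.
AVCaptureDeviceFormat *selectedFormat = SelectFormat(capturer, device, 640, 480);
if (selectedFormat != nil) {
  [capturer startCaptureWithDevice:device format:selectedFormat fps:30];
}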