Support more formats in RTCVideoFrame

Implement an Obj-C version of webrtc::VideoFrameBuffer and use it in
RTCVideoFrame.

Bug: webrtc:7785
Change-Id: I49f42bcf451dd6769b3a79a65fe7b400dce22677
Reviewed-on: https://chromium-review.googlesource.com/536773
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18691}
Author: Anders Carlsson
Date: 2017-06-20 11:01:34 +02:00
Committed by: Commit Bot
commit bd2220a9c4
parent 7f84aeaef6
24 changed files with 783 additions and 169 deletions
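
For orientation, a hedged sketch of the Obj-C surface the hunks below lean on. It lists only the members actually exercised in this excerpt; the width/height properties and exact argument types are assumptions, and the real WebRTC/RTCVideoFrameBuffer.h introduced by this change presumably declares more (an I420 conversion, other pixel formats).

// Sketch only -- not the actual WebRTC/RTCVideoFrameBuffer.h.
#import <CoreVideo/CoreVideo.h>
#import <Foundation/Foundation.h>

// Protocol implemented by the Obj-C frame buffer flavors.
@protocol RTCVideoFrameBuffer <NSObject>
// Assumed accessors; not shown in the hunks below.
@property(nonatomic, readonly) int width;
@property(nonatomic, readonly) int height;
@end

// Wraps a CVPixelBufferRef so camera/decoder output can travel through
// webrtc::VideoFrameBuffer as a "native" buffer and reach VideoToolbox
// without an intermediate I420 copy.
@interface RTCCVPixelBuffer : NSObject <RTCVideoFrameBuffer>

@property(nonatomic, readonly) CVPixelBufferRef pixelBuffer;

- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer;

// YES when the buffer carries crop metadata and cannot be handed to the
// compression session as-is.
- (BOOL)requiresCropping;
// YES when cropping/scaling to the given size needs an intermediate buffer.
- (BOOL)requiresScalingToWidth:(int)width height:(int)height;
- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height;
// Crops/scales into outputPixelBuffer, using tmpBuffer as scratch space.
- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
        withTempBuffer:(uint8_t*)tmpBuffer;

@end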

View File

@@ -18,9 +18,11 @@
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/common_video/include/video_frame.h"
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
 #include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 #if defined(WEBRTC_IOS)
 #import "Common/RTCUIApplicationStatusObserver.h"
 #endif
@@ -64,8 +66,8 @@ void VTDecompressionOutputCallback(void* decoder,
     return;
   }
   // TODO(tkchin): Handle CVO properly.
-  rtc::scoped_refptr<VideoFrameBuffer> buffer =
-      new rtc::RefCountedObject<CoreVideoFrameBuffer>(image_buffer);
+  rtc::scoped_refptr<VideoFrameBuffer> buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(
+      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:image_buffer]);
   VideoFrame decoded_frame(buffer, decode_params->timestamp,
                            CMTimeGetSeconds(timestamp) * kMsPerSec,
                            kVideoRotation_0);
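
Both codec files now funnel frames through ObjCFrameBuffer, the C++ half of the bridge named in the commit message. Its header (objc_frame_buffer.h) is not part of this excerpt, so the sketch below only shows the shape implied by the calls above and in the encoder hunk that follows: a constructor taking an id<RTCVideoFrameBuffer>, a type() of kNative, and wrapped_frame_buffer() to get the Obj-C object back. Include paths and the remaining webrtc::VideoFrameBuffer overrides are assumptions.

// Sketch of the C++ adapter implied by this commit -- not the actual
// objc_frame_buffer.h. Header paths are assumptions.
#import "WebRTC/RTCVideoFrameBuffer.h"

#include "webrtc/api/video/video_frame_buffer.h"

namespace webrtc {

class ObjCFrameBuffer : public VideoFrameBuffer {
 public:
  explicit ObjCFrameBuffer(id<RTCVideoFrameBuffer> frame_buffer)
      : frame_buffer_(frame_buffer) {}

  // Reported as a native buffer so encoders can special-case it, as the
  // H264VideoToolboxEncoder hunk below does.
  Type type() const override { return Type::kNative; }
  int width() const override { return frame_buffer_.width; }
  int height() const override { return frame_buffer_.height; }
  // Remaining VideoFrameBuffer overrides (I420 conversion etc.) elided.

  // Lets C++ callers get the wrapped Obj-C buffer back, e.g. to pull the
  // CVPixelBufferRef out of an RTCCVPixelBuffer.
  id<RTCVideoFrameBuffer> wrapped_frame_buffer() const { return frame_buffer_; }

 private:
  id<RTCVideoFrameBuffer> frame_buffer_;
};

}  // namespace webrtc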

View File

@@ -19,11 +19,12 @@
 #import "Common/RTCUIApplicationStatusObserver.h"
 #import "WebRTC/UIDevice+RTCDevice.h"
 #endif
+#import "WebRTC/RTCVideoFrameBuffer.h"
 #include "libyuv/convert_from.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/common_video/h264/profile_level_id.h"
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
 #include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
 #include "webrtc/system_wrappers/include/clock.h"
@@ -411,29 +412,49 @@ int H264VideoToolboxEncoder::Encode(
   }
 #endif
-  CVPixelBufferRef pixel_buffer;
+  CVPixelBufferRef pixel_buffer = nullptr;
   if (frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative) {
-    rtc::scoped_refptr<CoreVideoFrameBuffer> core_video_frame_buffer(
-        static_cast<CoreVideoFrameBuffer*>(frame.video_frame_buffer().get()));
-    if (!core_video_frame_buffer->RequiresCropping()) {
-      pixel_buffer = core_video_frame_buffer->pixel_buffer();
-      // This pixel buffer might have a higher resolution than what the
-      // compression session is configured to. The compression session can
-      // handle that and will output encoded frames in the configured
-      // resolution regardless of the input pixel buffer resolution.
-      CVBufferRetain(pixel_buffer);
-    } else {
-      // Cropping required, we need to crop and scale to a new pixel buffer.
-      pixel_buffer = internal::CreatePixelBuffer(pixel_buffer_pool);
-      if (!pixel_buffer) {
-        return WEBRTC_VIDEO_CODEC_ERROR;
-      }
-      if (!core_video_frame_buffer->CropAndScaleTo(&nv12_scale_buffer_,
-                                                   pixel_buffer)) {
-        return WEBRTC_VIDEO_CODEC_ERROR;
+    // Native frame.
+    rtc::scoped_refptr<ObjCFrameBuffer> objc_frame_buffer(
+        static_cast<ObjCFrameBuffer*>(frame.video_frame_buffer().get()));
+    id<RTCVideoFrameBuffer> wrapped_frame_buffer =
+        (id<RTCVideoFrameBuffer>)objc_frame_buffer->wrapped_frame_buffer();
+    if ([wrapped_frame_buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+      RTCCVPixelBuffer* rtc_pixel_buffer = (RTCCVPixelBuffer*)wrapped_frame_buffer;
+      if (![rtc_pixel_buffer requiresCropping]) {
+        // This pixel buffer might have a higher resolution than what the
+        // compression session is configured to. The compression session can
+        // handle that and will output encoded frames in the configured
+        // resolution regardless of the input pixel buffer resolution.
+        pixel_buffer = rtc_pixel_buffer.pixelBuffer;
+        CVBufferRetain(pixel_buffer);
+      } else {
+        // Cropping required, we need to crop and scale to a new pixel buffer.
+        pixel_buffer = internal::CreatePixelBuffer(pixel_buffer_pool);
+        if (!pixel_buffer) {
+          return WEBRTC_VIDEO_CODEC_ERROR;
+        }
+        int dst_width = CVPixelBufferGetWidth(pixel_buffer);
+        int dst_height = CVPixelBufferGetHeight(pixel_buffer);
+        if ([rtc_pixel_buffer requiresScalingToWidth:dst_width height:dst_height]) {
+          int size =
+              [rtc_pixel_buffer bufferSizeForCroppingAndScalingToWidth:dst_width height:dst_height];
+          nv12_scale_buffer_.resize(size);
+        } else {
+          nv12_scale_buffer_.clear();
+        }
+        nv12_scale_buffer_.shrink_to_fit();
+        if (![rtc_pixel_buffer cropAndScaleTo:pixel_buffer
+                               withTempBuffer:nv12_scale_buffer_.data()]) {
+          return WEBRTC_VIDEO_CODEC_ERROR;
+        }
       }
     }
-  } else {
+  }
+  if (!pixel_buffer) {
+    // We did not have a native frame, or the ObjCVideoFrame wrapped a non-native frame
     pixel_buffer = internal::CreatePixelBuffer(pixel_buffer_pool);
     if (!pixel_buffer) {
       return WEBRTC_VIDEO_CODEC_ERROR;
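
One detail worth noting in the cropping branch above: the NV12 scratch buffer only holds memory while frames actually need scaling. A standalone illustration of that pattern (helper name hypothetical, not WebRTC API):

#include <cstdint>
#include <vector>

// Mirrors the nv12_scale_buffer_ handling above: grow the scratch vector
// only when a scale is needed, and release its memory again once frames
// stop requiring it. shrink_to_fit() is a non-binding request, but in
// practice returns capacity after a clear() or a downsize.
void PrepareScratchBuffer(std::vector<uint8_t>* scratch,
                          bool needs_scaling,
                          int required_size) {
  if (needs_scaling) {
    scratch->resize(required_size);
  } else {
    scratch->clear();
  }
  scratch->shrink_to_fit();
}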