Revert of Move CoreVideoFrameBuffer from webrtc/common_video/ to webrtc/sdk/objc/ (patchset #2 id:60001 of https://codereview.webrtc.org/2851563003/ )

Reason for revert:
Breaks downstream targets.

Original issue's description:
> Move CoreVideoFrameBuffer from webrtc/common_video/ to webrtc/sdk/objc/
>
> CoreVideoFrameBuffer is Mac/iPhone specific and should be moved into
> the webrtc/sdk/objc/ folder.
>
> BUG=None
>
> Review-Url: https://codereview.webrtc.org/2851563003
> Cr-Commit-Position: refs/heads/master@{#17998}
> Committed: d41631aa27

TBR=kthelgason@webrtc.org,magjed@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=None

Review-Url: https://codereview.webrtc.org/2862663003
Cr-Commit-Position: refs/heads/master@{#18004}
This commit is contained in:
ehmaldonado
2017-05-03 13:25:38 -07:00
committed by Commit bot
parent 642b0f8ffd
commit c34e730896
12 changed files with 14 additions and 28 deletions

View File

@ -10,7 +10,7 @@
#import "RTCVideoFrame+Private.h"
#include "webrtc/sdk/objc/Framework/Classes/corevideo_frame_buffer.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
@implementation RTCVideoFrame {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;

View File

@ -23,7 +23,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/sdk/objc/Framework/Classes/corevideo_frame_buffer.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
namespace webrtc {

View File

@ -1,148 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/objc/Framework/Classes/corevideo_frame_buffer.h"
#include "libyuv/convert.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
namespace webrtc {
// Wraps |pixel_buffer| with explicit crop/scale parameters. The buffer is
// retained here and released in the destructor. |adapted_width|/|adapted_height|
// become width()/height(), i.e. the resolution NativeToI420Buffer() scales to;
// cropping (crop_* in buffer coordinates) is applied before scaling.
CoreVideoFrameBuffer::CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer,
int adapted_width,
int adapted_height,
int crop_width,
int crop_height,
int crop_x,
int crop_y)
: NativeHandleBuffer(pixel_buffer, adapted_width, adapted_height),
pixel_buffer_(pixel_buffer),
// Actual pixel-buffer resolution, independent of crop/adapted sizes.
buffer_width_(CVPixelBufferGetWidth(pixel_buffer)),
buffer_height_(CVPixelBufferGetHeight(pixel_buffer)),
crop_width_(crop_width), crop_height_(crop_height),
// Can only crop at even pixels.
crop_x_(crop_x & ~1), crop_y_(crop_y & ~1) {
CVBufferRetain(pixel_buffer_);
}
// Wraps |pixel_buffer| at its full resolution: no cropping and no scaling
// (crop size == buffer size == adapted size). The buffer is retained here
// and released in the destructor.
CoreVideoFrameBuffer::CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer)
: NativeHandleBuffer(pixel_buffer,
CVPixelBufferGetWidth(pixel_buffer),
CVPixelBufferGetHeight(pixel_buffer)),
pixel_buffer_(pixel_buffer),
buffer_width_(width_), buffer_height_(height_),
crop_width_(width_), crop_height_(height_),
crop_x_(0), crop_y_(0) {
CVBufferRetain(pixel_buffer_);
}
// Balances the CVBufferRetain() performed by both constructors.
CoreVideoFrameBuffer::~CoreVideoFrameBuffer() {
CVBufferRelease(pixel_buffer_);
}
// Converts the wrapped NV12 (biplanar 4:2:0) pixel buffer into a newly
// allocated I420Buffer, applying the configured crop and scaling the cropped
// region to width() x height(). The DCHECK below restricts input to the two
// biplanar 4:2:0 CoreVideo formats.
rtc::scoped_refptr<VideoFrameBuffer>
CoreVideoFrameBuffer::NativeToI420Buffer() {
const OSType pixel_format = CVPixelBufferGetPixelFormatType(pixel_buffer_);
RTC_DCHECK(pixel_format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
pixel_format == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
// Base addresses are only valid between lock and unlock; lock read-only
// since the source is never written.
// NOTE(review): the lock's CVReturn is not checked here, unlike in
// CropAndScaleTo() — on failure the plane base addresses would be NULL.
CVPixelBufferLockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
const uint8_t* src_y = static_cast<const uint8_t*>(
CVPixelBufferGetBaseAddressOfPlane(pixel_buffer_, 0));
const int src_y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 0);
const uint8_t* src_uv = static_cast<const uint8_t*>(
CVPixelBufferGetBaseAddressOfPlane(pixel_buffer_, 1));
const int src_uv_stride =
CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 1);
// Crop just by modifying pointers. crop_x_/crop_y_ are even (forced in the
// constructor), so the UV offset stays aligned to a full chroma sample.
src_y += src_y_stride * crop_y_ + crop_x_;
src_uv += src_uv_stride * (crop_y_ / 2) + crop_x_;
// TODO(magjed): Use a frame buffer pool.
NV12ToI420Scaler nv12_to_i420_scaler;
rtc::scoped_refptr<I420Buffer> buffer =
new rtc::RefCountedObject<I420Buffer>(width_, height_);
// Scales from the cropped source region (crop_width_ x crop_height_) to the
// destination buffer size while converting NV12 -> I420.
nv12_to_i420_scaler.NV12ToI420Scale(
src_y, src_y_stride,
src_uv, src_uv_stride,
crop_width_, crop_height_,
buffer->MutableDataY(), buffer->StrideY(),
buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(),
buffer->width(), buffer->height());
CVPixelBufferUnlockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
return buffer;
}
// Returns true when the configured crop region is smaller than the actual
// pixel buffer in either dimension, i.e. the buffer must be cropped before
// it can be scaled.
bool CoreVideoFrameBuffer::RequiresCropping() const {
  const bool full_width = (crop_width_ == buffer_width_);
  const bool full_height = (crop_height_ == buffer_height_);
  return !(full_width && full_height);
}
// Crops and scales the wrapped NV12 pixel buffer into |output_pixel_buffer|
// (which must be NV12 full-range, enforced by the DCHECK below).
// |tmp_buffer| is scratch space used by NV12Scale for intermediate UV-plane
// splitting. Returns true on success, false if either pixel buffer could not
// be locked.
bool CoreVideoFrameBuffer::CropAndScaleTo(
    std::vector<uint8_t>* tmp_buffer,
    CVPixelBufferRef output_pixel_buffer) const {
  // Prepare output pointers.
  RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(output_pixel_buffer),
                kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
  CVReturn cv_ret = CVPixelBufferLockBaseAddress(output_pixel_buffer, 0);
  if (cv_ret != kCVReturnSuccess) {
    LOG(LS_ERROR) << "Failed to lock base address: " << cv_ret;
    return false;
  }
  const int dst_width = CVPixelBufferGetWidth(output_pixel_buffer);
  const int dst_height = CVPixelBufferGetHeight(output_pixel_buffer);
  uint8_t* dst_y = reinterpret_cast<uint8_t*>(
      CVPixelBufferGetBaseAddressOfPlane(output_pixel_buffer, 0));
  const int dst_y_stride =
      CVPixelBufferGetBytesPerRowOfPlane(output_pixel_buffer, 0);
  uint8_t* dst_uv = reinterpret_cast<uint8_t*>(
      CVPixelBufferGetBaseAddressOfPlane(output_pixel_buffer, 1));
  const int dst_uv_stride =
      CVPixelBufferGetBytesPerRowOfPlane(output_pixel_buffer, 1);
  // Prepare source pointers.
  const OSType src_pixel_format =
      CVPixelBufferGetPixelFormatType(pixel_buffer_);
  RTC_DCHECK(
      src_pixel_format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
      src_pixel_format == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
  // Check the source lock as well; on failure the plane base addresses below
  // would be NULL. Unlock the already-locked output buffer before bailing.
  cv_ret = CVPixelBufferLockBaseAddress(pixel_buffer_,
                                        kCVPixelBufferLock_ReadOnly);
  if (cv_ret != kCVReturnSuccess) {
    LOG(LS_ERROR) << "Failed to lock source base address: " << cv_ret;
    CVPixelBufferUnlockBaseAddress(output_pixel_buffer, 0);
    return false;
  }
  const uint8_t* src_y = static_cast<const uint8_t*>(
      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer_, 0));
  const int src_y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 0);
  const uint8_t* src_uv = static_cast<const uint8_t*>(
      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer_, 1));
  const int src_uv_stride =
      CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 1);
  // Crop just by modifying pointers. crop_x_/crop_y_ are even (forced in the
  // constructor), so the UV offset stays aligned to a full chroma sample.
  src_y += src_y_stride * crop_y_ + crop_x_;
  src_uv += src_uv_stride * (crop_y_ / 2) + crop_x_;
  NV12Scale(tmp_buffer,
            src_y, src_y_stride,
            src_uv, src_uv_stride,
            crop_width_, crop_height_,
            dst_y, dst_y_stride,
            dst_uv, dst_uv_stride,
            dst_width, dst_height);
  CVPixelBufferUnlockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
  CVPixelBufferUnlockBaseAddress(output_pixel_buffer, 0);
  return true;
}
} // namespace webrtc

View File

@ -1,59 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_COMMON_VIDEO_INCLUDE_COREVIDEO_FRAME_BUFFER_H_
#define WEBRTC_COMMON_VIDEO_INCLUDE_COREVIDEO_FRAME_BUFFER_H_
#include <CoreVideo/CoreVideo.h>
#include <vector>
#include "webrtc/common_video/include/video_frame_buffer.h"
namespace webrtc {
// VideoFrameBuffer backed by a CoreVideo CVPixelBuffer (NV12, biplanar
// 4:2:0). Retains the pixel buffer for its own lifetime and supports
// cropping and scaling, either to an I420 copy (NativeToI420Buffer) or to
// another NV12 pixel buffer (CropAndScaleTo).
class CoreVideoFrameBuffer : public NativeHandleBuffer {
public:
// Wraps |pixel_buffer| at full resolution, with no cropping or scaling.
explicit CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer);
// Wraps |pixel_buffer| with a crop region (crop_* in buffer coordinates;
// crop_x/crop_y are rounded down to even values) and an adapted output
// resolution that conversion scales to.
CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer,
int adapted_width,
int adapted_height,
int crop_width,
int crop_height,
int crop_x,
int crop_y);
~CoreVideoFrameBuffer() override;
// Converts (crop + scale) the pixel buffer to a newly allocated I420 buffer.
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
// Returns true if the internal pixel buffer needs to be cropped.
bool RequiresCropping() const;
// Crop and scales the internal pixel buffer to the output pixel buffer. The
// tmp buffer is used for intermediary splitting the UV channels. This
// function returns true if successful.
bool CropAndScaleTo(std::vector<uint8_t>* tmp_buffer,
CVPixelBufferRef output_pixel_buffer) const;
private:
// Retained in the constructor, released in the destructor.
CVPixelBufferRef pixel_buffer_;
// buffer_width/height is the actual pixel buffer resolution. The width/height
// in NativeHandleBuffer, i.e. width()/height(), is the resolution we will
// scale to in NativeToI420Buffer(). Cropping happens before scaling, so:
// buffer_width >= crop_width >= width().
const int buffer_width_;
const int buffer_height_;
const int crop_width_;
const int crop_height_;
// Crop origin in buffer coordinates; always even (see constructor).
const int crop_x_;
const int crop_y_;
};
} // namespace webrtc
#endif // WEBRTC_COMMON_VIDEO_INCLUDE_COREVIDEO_FRAME_BUFFER_H_

View File

@ -20,7 +20,7 @@
#include "webrtc/api/video/video_frame.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/sdk/objc/Framework/Classes/corevideo_frame_buffer.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
#include "webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_nalu.h"
#include "webrtc/video_frame.h"

View File

@ -23,7 +23,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/common_video/h264/profile_level_id.h"
#include "webrtc/sdk/objc/Framework/Classes/corevideo_frame_buffer.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
#include "webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_nalu.h"
#include "webrtc/system_wrappers/include/clock.h"

View File

@ -12,7 +12,7 @@
#import "RTCVideoFrame+Private.h"
#include "webrtc/sdk/objc/Framework/Classes/corevideo_frame_buffer.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
namespace webrtc {