Files
platform-external-webrtc/webrtc/video_frame.h
magjed@webrtc.org c8895aa2f3 Unify underlying frame buffer in I420VideoFrame and WebRtcVideoFrame
Currently, I420VideoFrame uses three webrtc::Plane objects to store pixel data, and WebRtcVideoFrame uses WebRtcVideoFrame::FrameBuffer/webrtc::VideoFrame. The two subclasses WebRtcTextureVideoFrame and TextureVideoFrame use a NativeHandle to store pixel data, and there is also a class WebRtcVideoRenderFrame that wraps an I420VideoFrame.

This CL replaces these classes with a new interface VideoFrameBuffer that provides the common functionality. This makes it possible to remove deep frame copies between cricket::VideoFrame and I420VideoFrame.
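
A minimal sketch of the resulting usage, built only from the I420VideoFrame
API declared in this header (dimensions and strides are illustrative):

  webrtc::I420VideoFrame source;
  // Allocate a 640x480 I420 frame: Y stride 640, subsampled U/V strides 320.
  source.CreateEmptyFrame(640, 480, 640, 320, 320);
  // A second frame referencing the same VideoFrameBuffer; only the ref count
  // changes, no pixel data is copied.
  webrtc::I420VideoFrame shared(source.video_frame_buffer(),
                                source.timestamp(),
                                source.render_time_ms(),
                                source.rotation());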

Some additional minor changes are:
* Disallow creation of 0x0 texture frames.
* Remove the half-implemented ref count functions in I420VideoFrame.
* Remove the Alias functionality in WebRtcVideoFrame.

The final goal is to eliminate all frame copies, but to limit the scope of this CL, some planned changes are postponed to follow-up CLs (see the planned changes in https://webrtc-codereview.appspot.com/38879004, or https://docs.google.com/document/d/1bxoJZNmlo-Z9GnQwIaWpEG6hDlL_W-bzka8Zb_K2NbA/preview). Specifically, this CL:
* Keeps WebRtcTextureVideoFrame and TextureVideoFrame as empty subclasses that just delegate construction to the superclass.
* Keeps the deep copies from cricket::VideoFrame to I420VideoFrame.

BUG=1128
R=mflodman@webrtc.org, pbos@webrtc.org, perkj@webrtc.org, tommi@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/42469004

Cr-Commit-Position: refs/heads/master@{#8580}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8580 4adac7df-926f-26a2-2b94-8c16560cd09d
2015-03-03 21:22:26 +00:00


/*
 *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_VIDEO_FRAME_H_
#define WEBRTC_VIDEO_FRAME_H_

#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/interface/video_frame_buffer.h"
#include "webrtc/common_video/rotation.h"
#include "webrtc/typedefs.h"

namespace webrtc {
class I420VideoFrame {
 public:
  I420VideoFrame();
  I420VideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
                 uint32_t timestamp,
                 int64_t render_time_ms,
                 VideoRotation rotation);

  virtual ~I420VideoFrame();

  // CreateEmptyFrame: Sets frame dimensions and allocates buffers based on
  // the given dimensions - height and plane strides.
  // If the required size is bigger than the allocated one, new buffers of
  // adequate size will be allocated.
  // Return value: 0 on success, -1 on error.
  virtual int CreateEmptyFrame(int width,
                               int height,
                               int stride_y,
                               int stride_u,
                               int stride_v);

  // CreateFrame: Sets the frame's members and buffers. If the required size
  // is bigger than the allocated one, new buffers of adequate size will be
  // allocated.
  // Return value: 0 on success, -1 on error.
  // TODO(magjed): Remove unnecessary buffer size arguments.
  virtual int CreateFrame(int size_y,
                          const uint8_t* buffer_y,
                          int size_u,
                          const uint8_t* buffer_u,
                          int size_v,
                          const uint8_t* buffer_v,
                          int width,
                          int height,
                          int stride_y,
                          int stride_u,
                          int stride_v);

  // TODO(guoweis): remove the previous CreateFrame when chromium has this
  // code.
  virtual int CreateFrame(int size_y,
                          const uint8_t* buffer_y,
                          int size_u,
                          const uint8_t* buffer_u,
                          int size_v,
                          const uint8_t* buffer_v,
                          int width,
                          int height,
                          int stride_y,
                          int stride_u,
                          int stride_v,
                          VideoRotation rotation);

  // Copy frame: If the required size is bigger than the allocated one, new
  // buffers of adequate size will be allocated.
  // Return value: 0 on success, -1 on error.
  virtual int CopyFrame(const I420VideoFrame& videoFrame);

  // Make a copy of |this|. The caller owns the returned frame.
  // Return value: a new frame on success, NULL on error.
  virtual I420VideoFrame* CloneFrame() const;

  // Swap frames.
  virtual void SwapFrame(I420VideoFrame* videoFrame);

  // Get pointer to buffer per plane.
  virtual uint8_t* buffer(PlaneType type);
  // Overloading with const.
  virtual const uint8_t* buffer(PlaneType type) const;

  // Get allocated size per plane.
  virtual int allocated_size(PlaneType type) const;

  // Get allocated stride per plane.
  virtual int stride(PlaneType type) const;

  // Get frame width.
  virtual int width() const;

  // Get frame height.
  virtual int height() const;

  // Set frame timestamp (90 kHz).
  virtual void set_timestamp(uint32_t timestamp) { timestamp_ = timestamp; }

  // Get frame timestamp (90 kHz).
  virtual uint32_t timestamp() const { return timestamp_; }

  // Set capture NTP time in milliseconds.
  virtual void set_ntp_time_ms(int64_t ntp_time_ms) {
    ntp_time_ms_ = ntp_time_ms;
  }

  // Get capture NTP time in milliseconds.
  virtual int64_t ntp_time_ms() const { return ntp_time_ms_; }

  // Naming convention for Coordination of Video Orientation. Please see
  // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf
  //
  // "pending rotation" or "pending" = a frame that has a VideoRotation > 0.
  //
  // "not pending" = a frame that has a VideoRotation == 0.
  //
  // "apply rotation" = modify a frame from being "pending" to being "not
  // pending" rotation (a no-op for an unrotated frame).
  virtual VideoRotation rotation() const { return rotation_; }
  virtual void set_rotation(VideoRotation rotation) {
    rotation_ = rotation;
  }

  // Set render time in milliseconds.
  virtual void set_render_time_ms(int64_t render_time_ms) {
    render_time_ms_ = render_time_ms;
  }

  // Get render time in milliseconds.
  virtual int64_t render_time_ms() const { return render_time_ms_; }

  // Return true if the underlying plane buffers are of zero size, false if
  // not.
  virtual bool IsZeroSize() const;

  // Return the handle of the underlying video frame. This is used when the
  // frame is backed by a texture. The object should be destroyed when it is no
  // longer in use, so the underlying resource can be freed.
  virtual void* native_handle() const;

  // Return the underlying buffer.
  virtual rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer()
      const;

 private:
  // An opaque reference-counted handle that stores the pixel data.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
  uint32_t timestamp_;
  int64_t ntp_time_ms_;
  int64_t render_time_ms_;
  VideoRotation rotation_;
};

enum VideoFrameType {
  kKeyFrame = 0,
  kDeltaFrame = 1,
  kGoldenFrame = 2,
  kAltRefFrame = 3,
  kSkipFrame = 4
};

// TODO(pbos): Rename EncodedFrame and reformat this class' members.
class EncodedImage {
 public:
  EncodedImage()
      : _encodedWidth(0),
        _encodedHeight(0),
        _timeStamp(0),
        ntp_time_ms_(0),
        capture_time_ms_(0),
        _frameType(kDeltaFrame),
        _buffer(NULL),
        _length(0),
        _size(0),
        _completeFrame(false) {}

  EncodedImage(uint8_t* buffer, size_t length, size_t size)
      : _encodedWidth(0),
        _encodedHeight(0),
        _timeStamp(0),
        ntp_time_ms_(0),
        capture_time_ms_(0),
        _frameType(kDeltaFrame),
        _buffer(buffer),
        _length(length),
        _size(size),
        _completeFrame(false) {}

  uint32_t _encodedWidth;
  uint32_t _encodedHeight;
  uint32_t _timeStamp;
  // NTP time of the capture time in local timebase in milliseconds.
  int64_t ntp_time_ms_;
  int64_t capture_time_ms_;
  // TODO(pbos): Use webrtc::FrameType directly (and remove VideoFrameType).
  VideoFrameType _frameType;
  uint8_t* _buffer;
  size_t _length;
  size_t _size;
  bool _completeFrame;
};

}  // namespace webrtc

#endif  // WEBRTC_VIDEO_FRAME_H_
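
For reference, a minimal usage sketch of the I420VideoFrame API above. It
assumes the PlaneType enumerator kYPlane and the rotation constant
kVideoRotation_90 supplied by the included common_video headers; the
dimensions and values are illustrative:

  #include <string.h>

  #include "webrtc/video_frame.h"

  void FillAndTagFrame(webrtc::I420VideoFrame* frame) {
    // Allocate a 320x240 I420 frame: Y stride 320, subsampled U/V strides 160.
    frame->CreateEmptyFrame(320, 240, 320, 160, 160);
    // Zero the luma plane through the per-plane accessors.
    memset(frame->buffer(webrtc::kYPlane), 0,
           frame->allocated_size(webrtc::kYPlane));
    // Timestamps use the 90 kHz RTP clock; rotation is carried as metadata
    // until a consumer applies it ("apply rotation" above).
    frame->set_timestamp(90000);
    frame->set_rotation(webrtc::kVideoRotation_90);
  }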