Reland of Delete cricket::VideoFrame methods GetYPlane and GetYPitch. (patchset #1 id:1 of https://codereview.webrtc.org/1921493004/ )
Reason for revert:
Chrome has been updated, cl https://codereview.chromium.org/1919283005/

Original issue's description:
> Revert of Delete cricket::VideoFrame methods GetYPlane and GetYPitch. (patchset #5 id:80001 of https://codereview.webrtc.org/1901973002/ )
>
> Reason for revert:
> GetYPlane, GetYPitch etc is used by Chromium.
>
> Original issue's description:
> > Delete cricket::VideoFrame methods GetYPlane and GetYPitch.
> >
> > (And similarly for U and V). Also change video_frame_buffer method to
> > return a const ref to a scoped_ref_ptr.
> >
> > This cl is analogous to https://codereview.webrtc.org/1900673002/,
> > which delete corresponding methods in webrtc::VideoFrame.
> >
> > BUG=webrtc:5682
> >
> > Committed: https://crrev.com/1c27c6bf4cf0476dd2f09425509afaae4cdfe599
> > Cr-Commit-Position: refs/heads/master@{#12492}
>
> TBR=magjed@webrtc.org,perkj@webrtc.org,pbos@webrtc.org,pthatcher@webrtc.org,nisse@webrtc.org
> # Skipping CQ checks because original CL landed less than 1 days ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=webrtc:5682
>
> Committed: https://crrev.com/b05f994bb6f3055c852891c8acb531aee916a668
> Cr-Commit-Position: refs/heads/master@{#12494}

TBR=magjed@webrtc.org,perkj@webrtc.org,pbos@webrtc.org,pthatcher@webrtc.org,terelius@webrtc.org
# Not skipping CQ checks because original CL landed more than 1 days ago.
BUG=webrtc:5682

Review-Url: https://codereview.webrtc.org/1923903002
Cr-Commit-Position: refs/heads/master@{#12559}
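For callers coming from the removed accessors, here is a minimal migration sketch (not part of this CL): CopyYPlane is a hypothetical helper, while video_frame_buffer(), DataY() and StrideY() are the webrtc::VideoFrameBuffer accessors this change switches call sites to. It assumes the usual WebRTC headers for cricket::VideoFrame and webrtc::VideoFrameBuffer are available.

#include <string.h>  // memcpy

// Illustrative only: copy the Y plane of a frame into a caller-provided buffer.
void CopyYPlane(const cricket::VideoFrame& frame, uint8_t* dst, int dst_stride) {
  // Old code: const uint8_t* src = frame.GetYPlane();
  //           int32_t src_stride = frame.GetYPitch();
  const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer =
      frame.video_frame_buffer();
  if (!buffer) {
    return;  // Frame is not backed by a buffer (e.g. a texture frame).
  }
  const uint8_t* src = buffer->DataY();
  const int32_t src_stride = buffer->StrideY();
  for (int row = 0; row < frame.height(); ++row) {
    memcpy(dst + row * dst_stride, src + row * src_stride, frame.width());
  }
}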
@@ -55,30 +55,39 @@
}

- (const uint8_t*)yPlane {
const cricket::VideoFrame* const_frame = _videoFrame.get();
return const_frame->GetYPlane();
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer =
_videoFrame->video_frame_buffer();
return buffer ? buffer->DataY() : nullptr;
}

- (const uint8_t*)uPlane {
const cricket::VideoFrame* const_frame = _videoFrame.get();
return const_frame->GetUPlane();
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer =
_videoFrame->video_frame_buffer();
return buffer ? buffer->DataU() : nullptr;
}

- (const uint8_t*)vPlane {
const cricket::VideoFrame* const_frame = _videoFrame.get();
return const_frame->GetVPlane();
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer =
_videoFrame->video_frame_buffer();
return buffer ? buffer->DataV() : nullptr;
}

- (NSInteger)yPitch {
return _videoFrame->GetYPitch();
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer =
_videoFrame->video_frame_buffer();
return buffer ? buffer->StrideY() : 0;
}

- (NSInteger)uPitch {
return _videoFrame->GetUPitch();
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer =
_videoFrame->video_frame_buffer();
return buffer ? buffer->StrideU() : 0;
}

- (NSInteger)vPitch {
return _videoFrame->GetVPitch();
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer =
_videoFrame->video_frame_buffer();
return buffer ? buffer->StrideV() : 0;
}

@end

@@ -28,6 +28,8 @@
#import <Foundation/Foundation.h>

// RTCI420Frame is an ObjectiveC version of cricket::VideoFrame.
// TODO(nisse): It appears it doesn't support any VideoFrame methods,
// so let it wrap an webrtc::VideoFrameBuffer instead?
@interface RTCI420Frame : NSObject

@property(nonatomic, readonly) NSUInteger width;

@@ -762,20 +762,23 @@ class JavaVideoRendererWrapper
jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
jintArray strides = jni()->NewIntArray(3);
jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
strides_array[0] = frame->GetYPitch();
strides_array[1] = frame->GetUPitch();
strides_array[2] = frame->GetVPitch();
strides_array[0] = frame->video_frame_buffer()->StrideY();
strides_array[1] = frame->video_frame_buffer()->StrideU();
strides_array[2] = frame->video_frame_buffer()->StrideV();
jni()->ReleaseIntArrayElements(strides, strides_array, 0);
jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
jobject y_buffer =
jni()->NewDirectByteBuffer(const_cast<uint8_t*>(frame->GetYPlane()),
frame->GetYPitch() * frame->GetHeight());
jobject y_buffer = jni()->NewDirectByteBuffer(
const_cast<uint8_t*>(frame->video_frame_buffer()->DataY()),
frame->video_frame_buffer()->StrideY() *
frame->video_frame_buffer()->height());
size_t chroma_size =
((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
jobject u_buffer = jni()->NewDirectByteBuffer(
const_cast<uint8_t*>(frame->GetUPlane()), chroma_size);
const_cast<uint8_t*>(frame->video_frame_buffer()->DataU()),
chroma_size);
jobject v_buffer = jni()->NewDirectByteBuffer(
const_cast<uint8_t*>(frame->GetVPlane()), chroma_size);
const_cast<uint8_t*>(frame->video_frame_buffer()->DataV()),
chroma_size);
jni()->SetObjectArrayElement(planes, 0, y_buffer);
jni()->SetObjectArrayElement(planes, 1, u_buffer);
jni()->SetObjectArrayElement(planes, 2, v_buffer);

@@ -82,15 +82,15 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
uint8_t v_min,
uint8_t v_max,
const cricket::VideoFrame* frame) {
if (!frame) {
if (!frame || !frame->video_frame_buffer()) {
return false;
}
// Y
int y_width = frame->width();
int y_height = frame->height();
const uint8_t* y_plane = frame->GetYPlane();
const uint8_t* y_plane = frame->video_frame_buffer()->DataY();
const uint8_t* y_pos = y_plane;
int32_t y_pitch = frame->GetYPitch();
int32_t y_pitch = frame->video_frame_buffer()->StrideY();
for (int i = 0; i < y_height; ++i) {
for (int j = 0; j < y_width; ++j) {
uint8_t y_value = *(y_pos + j);
@@ -103,12 +103,12 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
// U and V
int chroma_width = (frame->width() + 1)/2;
int chroma_height = (frame->height() + 1)/2;
const uint8_t* u_plane = frame->GetUPlane();
const uint8_t* v_plane = frame->GetVPlane();
const uint8_t* u_plane = frame->video_frame_buffer()->DataU();
const uint8_t* v_plane = frame->video_frame_buffer()->DataV();
const uint8_t* u_pos = u_plane;
const uint8_t* v_pos = v_plane;
int32_t u_pitch = frame->GetUPitch();
int32_t v_pitch = frame->GetVPitch();
int32_t u_pitch = frame->video_frame_buffer()->StrideU();
int32_t v_pitch = frame->video_frame_buffer()->StrideV();
for (int i = 0; i < chroma_height; ++i) {
for (int j = 0; j < chroma_width; ++j) {
uint8_t u_value = *(u_pos + j);

@@ -137,7 +137,7 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
cricket::WebRtcVideoFrame frame1;
frame1.InitToBlack(100, 200, 10000 /*ts*/);
// Make it not all-black
frame1.GetUPlane()[0] = 0;
frame1.video_frame_buffer()->MutableDataU()[0] = 0;
broadcaster.OnFrame(frame1);
EXPECT_TRUE(sink1.black_frame());
EXPECT_EQ(10000, sink1.timestamp());
@@ -153,7 +153,7 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
cricket::WebRtcVideoFrame frame2;
frame2.InitToBlack(100, 200, 30000 /*ts*/);
// Make it not all-black
frame2.GetUPlane()[0] = 0;
frame2.video_frame_buffer()->MutableDataU()[0] = 0;
broadcaster.OnFrame(frame2);
EXPECT_FALSE(sink1.black_frame());
EXPECT_EQ(30000, sink1.timestamp());

@@ -31,15 +31,17 @@ bool VideoFrame::CopyToPlanes(uint8_t* dst_y,
int32_t dst_pitch_y,
int32_t dst_pitch_u,
int32_t dst_pitch_v) const {
if (!GetYPlane() || !GetUPlane() || !GetVPlane()) {
LOG(LS_ERROR) << "NULL plane pointer.";
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer =
video_frame_buffer();
if (!buffer) {
LOG(LS_ERROR) << "NULL video buffer.";
return false;
}
int32_t src_width = width();
int32_t src_height = height();
return libyuv::I420Copy(GetYPlane(), GetYPitch(),
GetUPlane(), GetUPitch(),
GetVPlane(), GetVPitch(),
return libyuv::I420Copy(buffer->DataY(), buffer->StrideY(),
buffer->DataU(), buffer->StrideU(),
buffer->DataV(), buffer->StrideV(),
dst_y, dst_pitch_y,
dst_u, dst_pitch_u,
dst_v, dst_pitch_v,
@@ -56,9 +58,11 @@ size_t VideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
return needed;
}

if (libyuv::ConvertFromI420(GetYPlane(), GetYPitch(), GetUPlane(),
GetUPitch(), GetVPlane(), GetVPitch(), buffer,
stride_rgb, width(), height(), to_fourcc)) {
if (libyuv::ConvertFromI420(
video_frame_buffer()->DataY(), video_frame_buffer()->StrideY(),
video_frame_buffer()->DataU(), video_frame_buffer()->StrideU(),
video_frame_buffer()->DataV(), video_frame_buffer()->StrideV(),
buffer, stride_rgb, width(), height(), to_fourcc)) {
LOG(LS_ERROR) << "RGB type not supported: " << to_fourcc;
return 0; // 0 indicates error
}
@@ -78,8 +82,8 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y,
size_t dst_height,
bool interpolate,
bool vert_crop) const {
if (!GetYPlane() || !GetUPlane() || !GetVPlane()) {
LOG(LS_ERROR) << "NULL plane pointer.";
if (!video_frame_buffer()) {
LOG(LS_ERROR) << "NULL frame buffer.";
return;
}

@@ -89,9 +93,9 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y,
CopyToPlanes(dst_y, dst_u, dst_v, dst_pitch_y, dst_pitch_u, dst_pitch_v);
return;
}
const uint8_t* src_y = GetYPlane();
const uint8_t* src_u = GetUPlane();
const uint8_t* src_v = GetVPlane();
const uint8_t* src_y = video_frame_buffer()->DataY();
const uint8_t* src_u = video_frame_buffer()->DataU();
const uint8_t* src_v = video_frame_buffer()->DataV();

if (vert_crop) {
// Adjust the input width:height ratio to be the same as the output ratio.
@@ -108,15 +112,16 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y,
int32_t iheight_offset =
static_cast<int32_t>((height() - src_height) >> 2);
iheight_offset <<= 1; // Ensure that iheight_offset is even.
src_y += iheight_offset * GetYPitch();
src_u += iheight_offset / 2 * GetUPitch();
src_v += iheight_offset / 2 * GetVPitch();
src_y += iheight_offset * video_frame_buffer()->StrideY();
src_u += iheight_offset / 2 * video_frame_buffer()->StrideU();
src_v += iheight_offset / 2 * video_frame_buffer()->StrideV();
}
}

// Scale to the output I420 frame.
libyuv::Scale(src_y, src_u, src_v,
GetYPitch(), GetUPitch(), GetVPitch(),
libyuv::Scale(src_y, src_u, src_v, video_frame_buffer()->StrideY(),
video_frame_buffer()->StrideU(),
video_frame_buffer()->StrideV(),
static_cast<int>(src_width), static_cast<int>(src_height),
dst_y, dst_u, dst_v, dst_pitch_y, dst_pitch_u, dst_pitch_v,
static_cast<int>(dst_width), static_cast<int>(dst_height),
@@ -130,8 +135,12 @@ void VideoFrame::StretchToFrame(VideoFrame* dst,
return;
}

StretchToPlanes(dst->GetYPlane(), dst->GetUPlane(), dst->GetVPlane(),
dst->GetYPitch(), dst->GetUPitch(), dst->GetVPitch(),
StretchToPlanes(dst->video_frame_buffer()->MutableDataY(),
dst->video_frame_buffer()->MutableDataU(),
dst->video_frame_buffer()->MutableDataV(),
dst->video_frame_buffer()->StrideY(),
dst->video_frame_buffer()->StrideU(),
dst->video_frame_buffer()->StrideV(),
dst->width(), dst->height(),
interpolate, vert_crop);
dst->SetTimeStamp(GetTimeStamp());
@@ -151,9 +160,12 @@ VideoFrame* VideoFrame::Stretch(size_t dst_width, size_t dst_height,
}

bool VideoFrame::SetToBlack() {
return libyuv::I420Rect(GetYPlane(), GetYPitch(),
GetUPlane(), GetUPitch(),
GetVPlane(), GetVPitch(),
return libyuv::I420Rect(video_frame_buffer()->MutableDataY(),
video_frame_buffer()->StrideY(),
video_frame_buffer()->MutableDataU(),
video_frame_buffer()->StrideU(),
video_frame_buffer()->MutableDataV(),
video_frame_buffer()->StrideV(),
0, 0,
width(), height(),
16, 128, 128) == 0;

@@ -35,18 +35,6 @@ class VideoFrame {
virtual size_t GetWidth() const final { return width(); }
virtual size_t GetHeight() const final { return height(); }

// These can return NULL if the object is not backed by a buffer.
virtual const uint8_t* GetYPlane() const = 0;
virtual const uint8_t* GetUPlane() const = 0;
virtual const uint8_t* GetVPlane() const = 0;
virtual uint8_t* GetYPlane() = 0;
virtual uint8_t* GetUPlane() = 0;
virtual uint8_t* GetVPlane() = 0;

virtual int32_t GetYPitch() const = 0;
virtual int32_t GetUPitch() const = 0;
virtual int32_t GetVPitch() const = 0;

// Returns the handle of the underlying video frame. This is used when the
// frame is backed by a texture. The object should be destroyed when it is no
// longer in use, so the underlying resource can be freed.
@@ -54,8 +42,8 @@ class VideoFrame {

// Returns the underlying video frame buffer. This function is ok to call
// multiple times, but the returned object will refer to the same memory.
virtual rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer()
const = 0;
virtual const rtc::scoped_refptr<webrtc::VideoFrameBuffer>&
video_frame_buffer() const = 0;

// System monotonic clock, same timebase as rtc::TimeMicros().
virtual int64_t timestamp_us() const = 0;

@@ -267,24 +267,27 @@ class VideoFrameTest : public testing::Test {
const uint8_t* start = reinterpret_cast<const uint8_t*>(ms->GetBuffer());
int awidth = (width + 1) & ~1;
frame->InitToBlack(width, height, 0);
int stride_y = frame->GetYPitch();
int stride_u = frame->GetUPitch();
int stride_v = frame->GetVPitch();
int stride_y = frame->video_frame_buffer()->StrideY();
int stride_u = frame->video_frame_buffer()->StrideU();
int stride_v = frame->video_frame_buffer()->StrideV();
uint8_t* plane_y = frame->video_frame_buffer()->MutableDataY();
uint8_t* plane_u = frame->video_frame_buffer()->MutableDataU();
uint8_t* plane_v = frame->video_frame_buffer()->MutableDataV();
for (uint32_t y = 0; y < height; ++y) {
for (uint32_t x = 0; x < width; x += 2) {
const uint8_t* quad1 = start + (y * awidth + x) * 2;
frame->GetYPlane()[stride_y * y + x] = quad1[y1_pos];
plane_y[stride_y * y + x] = quad1[y1_pos];
if ((x + 1) < width) {
frame->GetYPlane()[stride_y * y + x + 1] = quad1[y2_pos];
plane_y[stride_y * y + x + 1] = quad1[y2_pos];
}
if ((y & 1) == 0) {
const uint8_t* quad2 = quad1 + awidth * 2;
if ((y + 1) >= height) {
quad2 = quad1;
}
frame->GetUPlane()[stride_u * (y / 2) + x / 2] =
plane_u[stride_u * (y / 2) + x / 2] =
(quad1[u_pos] + quad2[u_pos] + 1) / 2;
frame->GetVPlane()[stride_v * (y / 2) + x / 2] =
plane_v[stride_v * (y / 2) + x / 2] =
(quad1[v_pos] + quad2[v_pos] + 1) / 2;
}
}
@@ -311,9 +314,12 @@ class VideoFrameTest : public testing::Test {
pitch = -pitch;
}
frame->InitToBlack(width, height, 0);
int stride_y = frame->GetYPitch();
int stride_u = frame->GetUPitch();
int stride_v = frame->GetVPitch();
int stride_y = frame->video_frame_buffer()->StrideY();
int stride_u = frame->video_frame_buffer()->StrideU();
int stride_v = frame->video_frame_buffer()->StrideV();
uint8_t* plane_y = frame->video_frame_buffer()->MutableDataY();
uint8_t* plane_u = frame->video_frame_buffer()->MutableDataU();
uint8_t* plane_v = frame->video_frame_buffer()->MutableDataV();
for (int32_t y = 0; y < height; y += 2) {
for (int32_t x = 0; x < width; x += 2) {
const uint8_t* rgb[4];
@@ -326,19 +332,19 @@ class VideoFrameTest : public testing::Test {
ConvertRgbPixel(rgb[i][r_pos], rgb[i][g_pos], rgb[i][b_pos],
&yuv[i][0], &yuv[i][1], &yuv[i][2]);
}
frame->GetYPlane()[stride_y * y + x] = yuv[0][0];
plane_y[stride_y * y + x] = yuv[0][0];
if ((x + 1) < width) {
frame->GetYPlane()[stride_y * y + x + 1] = yuv[1][0];
plane_y[stride_y * y + x + 1] = yuv[1][0];
}
if ((y + 1) < height) {
frame->GetYPlane()[stride_y * (y + 1) + x] = yuv[2][0];
plane_y[stride_y * (y + 1) + x] = yuv[2][0];
if ((x + 1) < width) {
frame->GetYPlane()[stride_y * (y + 1) + x + 1] = yuv[3][0];
plane_y[stride_y * (y + 1) + x + 1] = yuv[3][0];
}
}
frame->GetUPlane()[stride_u * (y / 2) + x / 2] =
plane_u[stride_u * (y / 2) + x / 2] =
(yuv[0][1] + yuv[1][1] + yuv[2][1] + yuv[3][1] + 2) / 4;
frame->GetVPlane()[stride_v * (y / 2) + x / 2] =
plane_v[stride_v * (y / 2) + x / 2] =
(yuv[0][2] + yuv[1][2] + yuv[2][2] + yuv[3][2] + 2) / 4;
}
}
@@ -395,15 +401,15 @@ class VideoFrameTest : public testing::Test {

// Comparison functions for testing.
static bool IsNull(const cricket::VideoFrame& frame) {
return !frame.GetYPlane();
return !frame.video_frame_buffer();
}

static bool IsSize(const cricket::VideoFrame& frame,
int width,
int height) {
return !IsNull(frame) && frame.GetYPitch() >= width &&
frame.GetUPitch() >= width / 2 &&
frame.GetVPitch() >= width / 2 &&
return !IsNull(frame) && frame.video_frame_buffer()->StrideY() >= width &&
frame.video_frame_buffer()->StrideU() >= width / 2 &&
frame.video_frame_buffer()->StrideV() >= width / 2 &&
frame.width() == width && frame.height() == height;
}

@@ -444,15 +450,17 @@ class VideoFrameTest : public testing::Test {
const uint8_t* v,
uint32_t vpitch,
int max_error) {
return IsSize(frame, width, height) &&
frame.GetTimeStamp() == time_stamp &&
IsPlaneEqual("y", frame.GetYPlane(), frame.GetYPitch(), y, ypitch,
return IsSize(frame, width, height) && frame.GetTimeStamp() == time_stamp &&
IsPlaneEqual("y", frame.video_frame_buffer()->DataY(),
frame.video_frame_buffer()->StrideY(), y, ypitch,
static_cast<uint32_t>(width),
static_cast<uint32_t>(height), max_error) &&
IsPlaneEqual("u", frame.GetUPlane(), frame.GetUPitch(), u, upitch,
IsPlaneEqual("u", frame.video_frame_buffer()->DataU(),
frame.video_frame_buffer()->StrideU(), u, upitch,
static_cast<uint32_t>((width + 1) / 2),
static_cast<uint32_t>((height + 1) / 2), max_error) &&
IsPlaneEqual("v", frame.GetVPlane(), frame.GetVPitch(), v, vpitch,
IsPlaneEqual("v", frame.video_frame_buffer()->DataV(),
frame.video_frame_buffer()->StrideV(), v, vpitch,
static_cast<uint32_t>((width + 1) / 2),
static_cast<uint32_t>((height + 1) / 2), max_error);
}
@@ -463,9 +471,12 @@ class VideoFrameTest : public testing::Test {
return IsEqual(frame1,
frame2.width(), frame2.height(),
frame2.GetTimeStamp(),
frame2.GetYPlane(), frame2.GetYPitch(),
frame2.GetUPlane(), frame2.GetUPitch(),
frame2.GetVPlane(), frame2.GetVPitch(),
frame2.video_frame_buffer()->DataY(),
frame2.video_frame_buffer()->StrideY(),
frame2.video_frame_buffer()->DataU(),
frame2.video_frame_buffer()->StrideU(),
frame2.video_frame_buffer()->DataV(),
frame2.video_frame_buffer()->StrideV(),
max_error);
}

@@ -478,23 +489,26 @@ class VideoFrameTest : public testing::Test {
frame2.width() - hcrop * 2,
frame2.height() - vcrop * 2,
frame2.GetTimeStamp(),
frame2.GetYPlane() + vcrop * frame2.GetYPitch()
frame2.video_frame_buffer()->DataY()
+ vcrop * frame2.video_frame_buffer()->StrideY()
+ hcrop,
frame2.GetYPitch(),
frame2.GetUPlane() + vcrop * frame2.GetUPitch() / 2
frame2.video_frame_buffer()->StrideY(),
frame2.video_frame_buffer()->DataU()
+ vcrop * frame2.video_frame_buffer()->StrideU() / 2
+ hcrop / 2,
frame2.GetUPitch(),
frame2.GetVPlane() + vcrop * frame2.GetVPitch() / 2
frame2.video_frame_buffer()->StrideU(),
frame2.video_frame_buffer()->DataV()
+ vcrop * frame2.video_frame_buffer()->StrideV() / 2
+ hcrop / 2,
frame2.GetVPitch(),
frame2.video_frame_buffer()->StrideV(),
max_error);
}

static bool IsBlack(const cricket::VideoFrame& frame) {
return !IsNull(frame) &&
*frame.GetYPlane() == 16 &&
*frame.GetUPlane() == 128 &&
*frame.GetVPlane() == 128;
*frame.video_frame_buffer()->DataY() == 16 &&
*frame.video_frame_buffer()->DataU() == 128 &&
*frame.video_frame_buffer()->DataV() == 128;
}

////////////////////////
@@ -541,9 +555,12 @@ class VideoFrameTest : public testing::Test {
uint8_t* y = ALIGNP(buf.get(), kAlignment);
uint8_t* u = y + kWidth * kHeight;
uint8_t* v = u + (kWidth / 2) * kHeight;
EXPECT_EQ(0, libyuv::I420ToI422(frame1.GetYPlane(), frame1.GetYPitch(),
frame1.GetUPlane(), frame1.GetUPitch(),
frame1.GetVPlane(), frame1.GetVPitch(),
EXPECT_EQ(0, libyuv::I420ToI422(frame1.video_frame_buffer()->DataY(),
frame1.video_frame_buffer()->StrideY(),
frame1.video_frame_buffer()->DataU(),
frame1.video_frame_buffer()->StrideU(),
frame1.video_frame_buffer()->DataV(),
frame1.video_frame_buffer()->StrideV(),
y, kWidth,
u, kWidth / 2,
v, kWidth / 2,
@@ -560,9 +577,12 @@ class VideoFrameTest : public testing::Test {
size_t buf_size = kWidth * kHeight * 2;
std::unique_ptr<uint8_t[]> buf(new uint8_t[buf_size + kAlignment]);
uint8_t* yuy2 = ALIGNP(buf.get(), kAlignment);
EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.GetYPlane(), frame1.GetYPitch(),
frame1.GetUPlane(), frame1.GetUPitch(),
frame1.GetVPlane(), frame1.GetVPitch(),
EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.video_frame_buffer()->DataY(),
frame1.video_frame_buffer()->StrideY(),
frame1.video_frame_buffer()->DataU(),
frame1.video_frame_buffer()->StrideU(),
frame1.video_frame_buffer()->DataV(),
frame1.video_frame_buffer()->StrideV(),
yuy2, kWidth * 2,
kWidth, kHeight));
EXPECT_TRUE(LoadFrame(yuy2, buf_size, cricket::FOURCC_YUY2,
@@ -577,9 +597,12 @@ class VideoFrameTest : public testing::Test {
size_t buf_size = kWidth * kHeight * 2;
std::unique_ptr<uint8_t[]> buf(new uint8_t[buf_size + kAlignment + 1]);
uint8_t* yuy2 = ALIGNP(buf.get(), kAlignment) + 1;
EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.GetYPlane(), frame1.GetYPitch(),
frame1.GetUPlane(), frame1.GetUPitch(),
frame1.GetVPlane(), frame1.GetVPitch(),
EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.video_frame_buffer()->DataY(),
frame1.video_frame_buffer()->StrideY(),
frame1.video_frame_buffer()->DataU(),
frame1.video_frame_buffer()->StrideU(),
frame1.video_frame_buffer()->DataV(),
frame1.video_frame_buffer()->StrideV(),
yuy2, kWidth * 2,
kWidth, kHeight));
EXPECT_TRUE(LoadFrame(yuy2, buf_size, cricket::FOURCC_YUY2,
@@ -792,16 +815,23 @@ class VideoFrameTest : public testing::Test {
EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \
kHeight, \
reinterpret_cast<uint8_t*>(ms->GetBuffer()), \
data_size, 0, webrtc::kVideoRotation_0)); \
int width_rotate = frame1.width(); \
int height_rotate = frame1.height(); \
EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
libyuv::I420Mirror( \
frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \
frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(), \
frame3.GetYPlane(), frame3.GetYPitch(), frame3.GetUPlane(), \
frame3.GetUPitch(), frame3.GetVPlane(), frame3.GetVPitch(), kWidth, \
kHeight); \
data_size, 0, webrtc::kVideoRotation_0)); \
int width_rotate = frame1.width(); \
int height_rotate = frame1.height(); \
EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
libyuv::I420Mirror(frame2.video_frame_buffer()->DataY(), \
frame2.video_frame_buffer()->StrideY(), \
frame2.video_frame_buffer()->DataU(), \
frame2.video_frame_buffer()->StrideU(), \
frame2.video_frame_buffer()->DataV(), \
frame2.video_frame_buffer()->StrideV(), \
frame3.video_frame_buffer()->MutableDataY(), \
frame3.video_frame_buffer()->StrideY(), \
frame3.video_frame_buffer()->MutableDataU(), \
frame3.video_frame_buffer()->StrideU(), \
frame3.video_frame_buffer()->MutableDataV(), \
frame3.video_frame_buffer()->StrideV(), \
kWidth, kHeight); \
EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \
}

@@ -823,16 +853,23 @@ class VideoFrameTest : public testing::Test {
EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \
kHeight, \
reinterpret_cast<uint8_t*>(ms->GetBuffer()), \
data_size, 0, webrtc::kVideoRotation_0)); \
int width_rotate = frame1.width(); \
int height_rotate = frame1.height(); \
EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
libyuv::I420Rotate( \
frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \
frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(), \
frame3.GetYPlane(), frame3.GetYPitch(), frame3.GetUPlane(), \
frame3.GetUPitch(), frame3.GetVPlane(), frame3.GetVPitch(), kWidth, \
kHeight, libyuv::kRotate##ROTATE); \
data_size, 0, webrtc::kVideoRotation_0)); \
int width_rotate = frame1.width(); \
int height_rotate = frame1.height(); \
EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
libyuv::I420Rotate(frame2.video_frame_buffer()->DataY(), \
frame2.video_frame_buffer()->StrideY(), \
frame2.video_frame_buffer()->DataU(), \
frame2.video_frame_buffer()->StrideU(), \
frame2.video_frame_buffer()->DataV(), \
frame2.video_frame_buffer()->StrideV(), \
frame3.video_frame_buffer()->MutableDataY(), \
frame3.video_frame_buffer()->StrideY(), \
frame3.video_frame_buffer()->MutableDataU(), \
frame3.video_frame_buffer()->StrideU(), \
frame3.video_frame_buffer()->MutableDataV(), \
frame3.video_frame_buffer()->StrideV(), \
kWidth, kHeight, libyuv::kRotate##ROTATE); \
EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \
}

@@ -952,9 +989,9 @@ class VideoFrameTest : public testing::Test {
}
EXPECT_EQ(5, frame.width());
EXPECT_EQ(5, frame.height());
EXPECT_EQ(5, frame.GetYPitch());
EXPECT_EQ(3, frame.GetUPitch());
EXPECT_EQ(3, frame.GetVPitch());
EXPECT_EQ(5, frame.video_frame_buffer()->StrideY());
EXPECT_EQ(3, frame.video_frame_buffer()->StrideU());
EXPECT_EQ(3, frame.video_frame_buffer()->StrideV());
}

// Test 1 pixel edge case image ARGB buffer.
@@ -1121,8 +1158,10 @@ class VideoFrameTest : public testing::Test {
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
ASSERT_TRUE(LoadFrame(kJpeg400Filename,
cricket::FOURCC_MJPG, kWidth, kHeight, &frame2));
EXPECT_TRUE(IsPlaneEqual("y", frame1.GetYPlane(), frame1.GetYPitch(),
frame2.GetYPlane(), frame2.GetYPitch(),
EXPECT_TRUE(IsPlaneEqual("y", frame1.video_frame_buffer()->DataY(),
frame1.video_frame_buffer()->StrideY(),
frame2.video_frame_buffer()->DataY(),
frame2.video_frame_buffer()->StrideY(),
kWidth, kHeight, 32));
EXPECT_TRUE(IsEqual(frame1, frame2, 128));
}
@@ -1304,9 +1343,7 @@ class VideoFrameTest : public testing::Test {
EXPECT_TRUE(frame2.Init(frame1));
}
EXPECT_TRUE(IsEqual(frame1, frame2, 0));
EXPECT_EQ(frame1.GetYPlane(), frame2.GetYPlane());
EXPECT_EQ(frame1.GetUPlane(), frame2.GetUPlane());
EXPECT_EQ(frame1.GetVPlane(), frame2.GetVPlane());
EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer());
}

// Test creating an empty image and initing it to black.
@@ -1419,9 +1456,12 @@ class VideoFrameTest : public testing::Test {
EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 0));
for (int i = 0; i < repeat_from; ++i) {
EXPECT_EQ(0, RGBToI420(out, stride,
frame2.GetYPlane(), frame2.GetYPitch(),
frame2.GetUPlane(), frame2.GetUPitch(),
frame2.GetVPlane(), frame2.GetVPitch(),
frame2.video_frame_buffer()->MutableDataY(),
frame2.video_frame_buffer()->StrideY(),
frame2.video_frame_buffer()->MutableDataU(),
frame2.video_frame_buffer()->StrideU(),
frame2.video_frame_buffer()->MutableDataV(),
frame2.video_frame_buffer()->StrideV(),
kWidth, kHeight));
}
if (rowpad) {
@@ -1724,9 +1764,12 @@ class VideoFrameTest : public testing::Test {
uint8_t* v = u + (kWidth / 2) * kHeight;
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
for (int i = 0; i < repeat_; ++i) {
EXPECT_EQ(0, libyuv::I420ToI422(frame1.GetYPlane(), frame1.GetYPitch(),
frame1.GetUPlane(), frame1.GetUPitch(),
frame1.GetVPlane(), frame1.GetVPitch(),
EXPECT_EQ(0, libyuv::I420ToI422(frame1.video_frame_buffer()->DataY(),
frame1.video_frame_buffer()->StrideY(),
frame1.video_frame_buffer()->DataU(),
frame1.video_frame_buffer()->StrideU(),
frame1.video_frame_buffer()->DataV(),
frame1.video_frame_buffer()->StrideV(),
y, kWidth,
u, kWidth / 2,
v, kWidth / 2,
@@ -1749,7 +1792,8 @@ class VideoFrameTest : public testing::Test {
target.reset(source->Copy());
EXPECT_TRUE(IsEqual(*source, *target, 0));
source.reset();
EXPECT_TRUE(target->GetYPlane() != NULL);
ASSERT_TRUE(target->video_frame_buffer() != NULL);
EXPECT_TRUE(target->video_frame_buffer()->DataY() != NULL);
}

void CopyIsRef() {
@@ -1759,9 +1803,7 @@ class VideoFrameTest : public testing::Test {
target.reset(source->Copy());
EXPECT_TRUE(IsEqual(*source, *target, 0));
const T* const_source = source.get();
EXPECT_EQ(const_source->GetYPlane(), target->GetYPlane());
EXPECT_EQ(const_source->GetUPlane(), target->GetUPlane());
EXPECT_EQ(const_source->GetVPlane(), target->GetVPlane());
EXPECT_EQ(const_source->video_frame_buffer(), target->video_frame_buffer());
}

void StretchToFrame() {

@@ -79,45 +79,6 @@ int WebRtcVideoFrame::height() const {
return video_frame_buffer_ ? video_frame_buffer_->height() : 0;
}

const uint8_t* WebRtcVideoFrame::GetYPlane() const {
return video_frame_buffer_ ? video_frame_buffer_->DataY() : nullptr;
}

const uint8_t* WebRtcVideoFrame::GetUPlane() const {
return video_frame_buffer_ ? video_frame_buffer_->DataU() : nullptr;
}

const uint8_t* WebRtcVideoFrame::GetVPlane() const {
return video_frame_buffer_ ? video_frame_buffer_->DataV() : nullptr;
}

uint8_t* WebRtcVideoFrame::GetYPlane() {
return video_frame_buffer_ ? video_frame_buffer_->MutableData(kYPlane)
: nullptr;
}

uint8_t* WebRtcVideoFrame::GetUPlane() {
return video_frame_buffer_ ? video_frame_buffer_->MutableData(kUPlane)
: nullptr;
}

uint8_t* WebRtcVideoFrame::GetVPlane() {
return video_frame_buffer_ ? video_frame_buffer_->MutableData(kVPlane)
: nullptr;
}

int32_t WebRtcVideoFrame::GetYPitch() const {
return video_frame_buffer_ ? video_frame_buffer_->StrideY() : 0;
}

int32_t WebRtcVideoFrame::GetUPitch() const {
return video_frame_buffer_ ? video_frame_buffer_->StrideU() : 0;
}

int32_t WebRtcVideoFrame::GetVPitch() const {
return video_frame_buffer_ ? video_frame_buffer_->StrideV() : 0;
}

bool WebRtcVideoFrame::IsExclusive() const {
return video_frame_buffer_->IsMutable();
}
@@ -126,7 +87,7 @@ void* WebRtcVideoFrame::GetNativeHandle() const {
return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr;
}

rtc::scoped_refptr<webrtc::VideoFrameBuffer>
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>&
WebRtcVideoFrame::video_frame_buffer() const {
return video_frame_buffer_;
}
@@ -181,9 +142,12 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
int idh = (h < 0) ? -dh : dh;
int r = libyuv::ConvertToI420(
sample, sample_size,
GetYPlane(), GetYPitch(),
GetUPlane(), GetUPitch(),
GetVPlane(), GetVPitch(),
video_frame_buffer_->MutableDataY(),
video_frame_buffer_->StrideY(),
video_frame_buffer_->MutableDataU(),
video_frame_buffer_->StrideU(),
video_frame_buffer_->MutableDataV(),
video_frame_buffer_->StrideV(),
horiz_crop, vert_crop,
w, h,
dw, idh,
@@ -252,10 +216,15 @@ const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const {
// TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from
// VideoRotation to libyuv::RotationMode.
int ret = libyuv::I420Rotate(
GetYPlane(), GetYPitch(), GetUPlane(), GetUPitch(), GetVPlane(),
GetVPitch(), rotated_frame_->GetYPlane(), rotated_frame_->GetYPitch(),
rotated_frame_->GetUPlane(), rotated_frame_->GetUPitch(),
rotated_frame_->GetVPlane(), rotated_frame_->GetVPitch(),
video_frame_buffer_->DataY(), video_frame_buffer_->StrideY(),
video_frame_buffer_->DataU(), video_frame_buffer_->StrideU(),
video_frame_buffer_->DataV(), video_frame_buffer_->StrideV(),
rotated_frame_->video_frame_buffer()->MutableDataY(),
rotated_frame_->video_frame_buffer()->StrideY(),
rotated_frame_->video_frame_buffer()->MutableDataU(),
rotated_frame_->video_frame_buffer()->StrideU(),
rotated_frame_->video_frame_buffer()->MutableDataV(),
rotated_frame_->video_frame_buffer()->StrideV(),
orig_width, orig_height,
static_cast<libyuv::RotationMode>(rotation()));
if (ret == 0) {

@@ -68,17 +68,8 @@ class WebRtcVideoFrame : public VideoFrame {
int width() const override;
int height() const override;

const uint8_t* GetYPlane() const override;
const uint8_t* GetUPlane() const override;
const uint8_t* GetVPlane() const override;
uint8_t* GetYPlane() override;
uint8_t* GetUPlane() override;
uint8_t* GetVPlane() override;
int32_t GetYPitch() const override;
int32_t GetUPitch() const override;
int32_t GetVPitch() const override;
void* GetNativeHandle() const override;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer()
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer()
const override;

/* System monotonic clock */