Revert of Move MutableDataY{,U,V} methods to I420Buffer only. (patchset #14 id:260001 of https://codereview.webrtc.org/2278883002/ )

Reason for revert:
Broke downstream application.

Original issue's description:
> Move MutableDataY{,U,V} methods to I420Buffer only.
>
> Deleted from the VideoFrameBuffer base class.
>
> BUG=webrtc:5921
>
> Committed: https://crrev.com/5539ef6c03c273f39fadae41ace47fdc11ac6d60
> Cr-Commit-Position: refs/heads/master@{#14317}

TBR=perkj@webrtc.org,magjed@webrtc.org,pthatcher@webrtc.org,honghaiz@webrtc.org,stefan@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5921

Review-Url: https://codereview.webrtc.org/2354223002
Cr-Commit-Position: refs/heads/master@{#14325}
Author: nisse
Date: 2016-09-21 03:52:16 -07:00
Committed by: Commit bot
Commit: 776870a259 (parent ad5d65845f)

27 changed files with 530 additions and 502 deletions
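For orientation before the per-file hunks: the reverted CL had deleted the MutableDataY/U/V accessors from the VideoFrameBuffer base class, leaving them only on I420Buffer, so any caller that writes into planes through a generic buffer handle stopped compiling. This revert restores the base-class virtuals (with not-reached defaults) and the I420Buffer overrides. A simplified sketch of the restored shape, not the full WebRTC headers:

// Simplified sketch of the interface restored by this revert; the real
// declarations live in the WebRTC video_frame_buffer.h header.
class VideoFrameBuffer : public rtc::RefCountInterface {
 public:
  virtual int width() const = 0;
  virtual int height() const = 0;
  virtual const uint8_t* DataY() const = 0;   // DataU()/DataV() analogous.
  virtual int StrideY() const = 0;            // StrideU()/StrideV() analogous.

  // Non-const data access, restored by this revert. The default
  // implementations hit RTC_NOTREACHED() and return nullptr for buffer
  // types that are not writable.
  virtual uint8_t* MutableDataY();
  virtual uint8_t* MutableDataU();
  virtual uint8_t* MutableDataV();
};

class I420Buffer : public VideoFrameBuffer {
 public:
  // Writable plane access for the plain I420 memory buffer.
  uint8_t* MutableDataY() override;
  uint8_t* MutableDataU() override;
  uint8_t* MutableDataV() override;
};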

View File

@@ -35,7 +35,7 @@ CoreVideoFrameBuffer::NativeToI420Buffer() {
   size_t width = CVPixelBufferGetWidthOfPlane(pixel_buffer_, 0);
   size_t height = CVPixelBufferGetHeightOfPlane(pixel_buffer_, 0);
   // TODO(tkchin): Use a frame buffer pool.
-  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
       new rtc::RefCountedObject<webrtc::I420Buffer>(width, height);
   CVPixelBufferLockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
   const uint8_t* src_y = static_cast<const uint8_t*>(

View File

@@ -52,7 +52,7 @@ TEST(TestI420BufferPool, FailToReuse) {
 }

 TEST(TestI420BufferPool, FrameValidAfterPoolDestruction) {
-  rtc::scoped_refptr<I420Buffer> buffer;
+  rtc::scoped_refptr<VideoFrameBuffer> buffer;
   {
     I420BufferPool pool;
     buffer = pool.CreateBuffer(16, 16);

View File

@@ -162,14 +162,16 @@ TEST(TestVideoFrame, CopyFrame) {
   EXPECT_EQ(kRotation, small_frame.rotation());

   // Frame of larger dimensions.
-  rtc::scoped_refptr<I420Buffer> buffer =
-      I420Buffer::Create(width, height, stride_y, stride_u, stride_v);
-  memset(buffer->MutableDataY(), 1, width * height);
-  memset(buffer->MutableDataU(), 2, ((height + 1) / 2) * stride_u);
-  memset(buffer->MutableDataV(), 3, ((height + 1) / 2) * stride_u);
-  VideoFrame other_frame(buffer, 0, 0, webrtc::kVideoRotation_0);
-  big_frame.CopyFrame(other_frame);
-  EXPECT_TRUE(test::FramesEqual(other_frame, big_frame));
+  small_frame.CreateEmptyFrame(width, height,
+                               stride_y, stride_u, stride_v);
+  memset(small_frame.video_frame_buffer()->MutableDataY(), 1,
+         small_frame.allocated_size(kYPlane));
+  memset(small_frame.video_frame_buffer()->MutableDataU(), 2,
+         small_frame.allocated_size(kUPlane));
+  memset(small_frame.video_frame_buffer()->MutableDataV(), 3,
+         small_frame.allocated_size(kVPlane));
+  big_frame.CopyFrame(small_frame);
+  EXPECT_TRUE(test::FramesEqual(small_frame, big_frame));
 }

 TEST(TestVideoFrame, ShallowCopy) {

View File

@@ -45,6 +45,12 @@ class VideoFrameBuffer : public rtc::RefCountInterface {
   virtual const uint8_t* DataU() const = 0;
   virtual const uint8_t* DataV() const = 0;

+  // TODO(nisse): Move MutableData methods to the I420Buffer subclass.
+  // Non-const data access.
+  virtual uint8_t* MutableDataY();
+  virtual uint8_t* MutableDataU();
+  virtual uint8_t* MutableDataV();
+
   // Returns the number of bytes between successive rows for a given plane.
   virtual int StrideY() const = 0;
   virtual int StrideU() const = 0;

@@ -92,9 +98,9 @@ class I420Buffer : public VideoFrameBuffer {
   const uint8_t* DataU() const override;
   const uint8_t* DataV() const override;

-  uint8_t* MutableDataY();
-  uint8_t* MutableDataU();
-  uint8_t* MutableDataV();
+  uint8_t* MutableDataY() override;
+  uint8_t* MutableDataU() override;
+  uint8_t* MutableDataV() override;
   int StrideY() const override;
   int StrideU() const override;
   int StrideV() const override;
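The hunk above is the crux of the revert: MutableDataY/U/V come back as virtuals on VideoFrameBuffer, with I420Buffer overriding them. Downstream code of roughly the following shape, which obtains the buffer from a frame and writes through it, is the kind of code that stopped building when the accessors were removed. This is a hypothetical illustration, not code from this CL, and it assumes the frame is backed by a writable I420 buffer:

#include <string.h>

#include "webrtc/video_frame.h"

// Hypothetical downstream helper: compiles only while MutableDataY() is
// declared on VideoFrameBuffer itself, because it never casts to I420Buffer.
void PaintGrayY(webrtc::VideoFrame* frame) {
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      frame->video_frame_buffer();
  // Fill the whole Y plane (stride * height bytes) with mid-gray.
  memset(buffer->MutableDataY(), 128, buffer->StrideY() * buffer->height());
}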

View File

@@ -69,7 +69,6 @@ size_t CalcBufferSize(VideoType type, int width, int height);
 // already open for writing.
 // Return value: 0 if OK, < 0 otherwise.
 int PrintVideoFrame(const VideoFrame& frame, FILE* file);
-int PrintVideoFrame(const VideoFrameBuffer& frame, FILE* file);

 // Extract buffer from VideoFrame or VideoFrameBuffer (consecutive
 // planes, no stride)

@@ -93,13 +92,11 @@ int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer);
 // - sample_size : Required only for the parsing of MJPG (set to 0 else).
 // - rotate : Rotation mode of output image.
 // Output:
-// - dst_buffer : Reference to a destination frame buffer.
+// - dst_frame : Reference to a destination frame.
 // Return value: 0 if OK, < 0 otherwise.

-// TODO(nisse): Delete this wrapper, and let users call libyuv directly. Most
-// calls pass |src_video_type| == kI420, and should use libyuv::I420Copy. The
-// only exception at the time of this writing is
-// VideoCaptureImpl::IncomingFrame, which still needs libyuv::ConvertToI420.
+// TODO(nisse): Deprecated, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5921.
 int ConvertToI420(VideoType src_video_type,
                   const uint8_t* src_frame,
                   int crop_x,

@@ -108,7 +105,7 @@ int ConvertToI420(VideoType src_video_type,
                   int src_height,
                   size_t sample_size,
                   VideoRotation rotation,
-                  I420Buffer* dst_buffer);
+                  VideoFrame* dst_frame);

 // Convert From I420
 // Input:
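With the VideoFrame-based ConvertToI420() signature restored, callers allocate the destination frame first and let the wrapper fill its planes. A sketch of the call pattern, mirroring the unit tests further down; width, height and src_buffer are placeholders for values the caller already has:

// Sketch of the restored ConvertToI420() call pattern (see the libyuv
// unit-test hunks below); width/height/src_buffer are placeholders.
webrtc::VideoFrame dst_frame;
dst_frame.CreateEmptyFrame(width, height, width,
                           (width + 1) / 2, (width + 1) / 2);
int ret = webrtc::ConvertToI420(webrtc::kI420, src_buffer,
                                0, 0,            // no cropping
                                width, height,
                                0,               // sample_size (non-MJPG)
                                webrtc::kVideoRotation_0,
                                &dst_frame);
// ret is 0 on success, < 0 otherwise.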

View File

@@ -95,20 +95,21 @@ TEST_F(TestLibYuv, ConvertTest) {
   double psnr = 0.0;

-  rtc::scoped_refptr<I420Buffer> res_i420_buffer = I420Buffer::Create(
-      width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
+  VideoFrame res_i420_frame;
+  res_i420_frame.CreateEmptyFrame(width_, height_, width_,
+                                  (width_ + 1) / 2,
+                                  (width_ + 1) / 2);

   printf("\nConvert #%d I420 <-> I420 \n", j);
   std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
-  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0, out_i420_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
+                               out_i420_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));

-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
   j++;

@@ -118,18 +119,17 @@ TEST_F(TestLibYuv, ConvertTest) {
   int stride_y = 0;
   int stride_uv = 0;
   Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
-  res_i420_buffer =
-      I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
+  res_i420_frame.CreateEmptyFrame(width_, height_, stride_y,
+                                  stride_uv, stride_uv);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
   EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));

-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);

   // Optimization Speed- quality trade-off => 45 dB only (platform dependant).
   EXPECT_GT(ceil(psnr), 44);

@@ -139,11 +139,10 @@ TEST_F(TestLibYuv, ConvertTest) {
   std::unique_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
   j++;

@@ -153,31 +152,29 @@ TEST_F(TestLibYuv, ConvertTest) {
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
-                             height_, 0,
-                             kVideoRotation_0, res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));

-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);

   printf("\nConvert #%d I420 <-> RGB565\n", j);
   std::unique_ptr<uint8_t[]> out_rgb565_buffer(
       new uint8_t[width_ * height_ * 2]);
-  EXPECT_EQ(0,
-            ConvertFromI420(orig_frame_, kRGB565, 0, out_rgb565_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB565, 0,
+                               out_rgb565_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
-                             height_, 0,
-                             kVideoRotation_0, res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));

-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
   j++;

-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   // TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565,
   // Another example is I420ToRGB24, the psnr is 44
   // TODO(mikhal): Add psnr for RGB565, 1555, 4444, convert to ARGB.

@@ -186,20 +183,18 @@ TEST_F(TestLibYuv, ConvertTest) {
   printf("\nConvert #%d I420 <-> ARGB8888\n", j);
   std::unique_ptr<uint8_t[]> out_argb8888_buffer(
       new uint8_t[width_ * height_ * 4]);
-  EXPECT_EQ(0,
-            ConvertFromI420(orig_frame_, kARGB, 0, out_argb8888_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kARGB, 0,
+                               out_argb8888_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));

-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
-  // TODO(leozwang) Investigate the right psnr should be set for
-  // I420ToARGB8888,
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
+  // TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888,
   EXPECT_GT(ceil(psnr), 42);

   ASSERT_EQ(0, fclose(output_file));

@@ -214,48 +209,49 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
   double psnr = 0.0;

+  VideoFrame res_i420_frame;
   int stride_y = 0;
   int stride_uv = 0;
   Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
-
-  rtc::scoped_refptr<I420Buffer> res_i420_buffer =
-      I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
+  res_i420_frame.CreateEmptyFrame(width_, height_,
+                                  stride_y, stride_uv, stride_uv);
   std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
                                out_i420_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));

-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
 }

 TEST_F(TestLibYuv, RotateTest) {
-  // Use ConvertToI420 for multiple rotations - see that nothing breaks, all
+  // Use ConvertToI420 for multiple roatations - see that nothing breaks, all
   // memory is properly allocated and end result is equal to the starting point.
+  VideoFrame rotated_res_i420_frame;
   int rotated_width = height_;
   int rotated_height = width_;
   int stride_y;
   int stride_uv;
   Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
-  rtc::scoped_refptr<I420Buffer> rotated_res_i420_buffer = I420Buffer::Create(
-      rotated_width, rotated_height, stride_y, stride_uv, stride_uv);
+  rotated_res_i420_frame.CreateEmptyFrame(rotated_width,
+                                          rotated_height,
+                                          stride_y,
+                                          stride_uv,
+                                          stride_uv);
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
-                             0, kVideoRotation_90,
-                             rotated_res_i420_buffer.get()));
+                             0, kVideoRotation_90, &rotated_res_i420_frame));
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
-                             0, kVideoRotation_270,
-                             rotated_res_i420_buffer.get()));
-  rotated_res_i420_buffer = I420Buffer::Create(
-      width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
+                             0, kVideoRotation_270, &rotated_res_i420_frame));
+  rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
+                                          width_, (width_ + 1) / 2,
+                                          (width_ + 1) / 2);
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
-                             0, kVideoRotation_180,
-                             rotated_res_i420_buffer.get()));
+                             0, kVideoRotation_180, &rotated_res_i420_frame));
 }

 }  // namespace webrtc

View File

@@ -103,35 +103,33 @@ static int PrintPlane(const uint8_t* buf,
 }

 // TODO(nisse): Belongs with the test code?
-int PrintVideoFrame(const VideoFrameBuffer& frame, FILE* file) {
-  int width = frame.width();
-  int height = frame.height();
+int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
+  if (file == NULL)
+    return -1;
+  if (frame.IsZeroSize())
+    return -1;
+  int width = frame.video_frame_buffer()->width();
+  int height = frame.video_frame_buffer()->height();
   int chroma_width = (width + 1) / 2;
   int chroma_height = (height + 1) / 2;

-  if (PrintPlane(frame.DataY(), width, height,
-                 frame.StrideY(), file) < 0) {
+  if (PrintPlane(frame.video_frame_buffer()->DataY(), width, height,
+                 frame.video_frame_buffer()->StrideY(), file) < 0) {
     return -1;
   }
-  if (PrintPlane(frame.DataU(),
+  if (PrintPlane(frame.video_frame_buffer()->DataU(),
                  chroma_width, chroma_height,
-                 frame.StrideU(), file) < 0) {
+                 frame.video_frame_buffer()->StrideU(), file) < 0) {
     return -1;
   }
-  if (PrintPlane(frame.DataV(),
+  if (PrintPlane(frame.video_frame_buffer()->DataV(),
                  chroma_width, chroma_height,
-                 frame.StrideV(), file) < 0) {
+                 frame.video_frame_buffer()->StrideV(), file) < 0) {
     return -1;
   }
   return 0;
 }

-int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
-  if (frame.IsZeroSize())
-    return -1;
-  return PrintVideoFrame(*frame.video_frame_buffer(), file);
-}
-
 int ExtractBuffer(const rtc::scoped_refptr<VideoFrameBuffer>& input_frame,
                   size_t size,
                   uint8_t* buffer) {

@@ -251,19 +249,23 @@ int ConvertToI420(VideoType src_video_type,
                   int src_height,
                   size_t sample_size,
                   VideoRotation rotation,
-                  I420Buffer* dst_buffer) {
-  int dst_width = dst_buffer->width();
-  int dst_height = dst_buffer->height();
+                  VideoFrame* dst_frame) {
+  int dst_width = dst_frame->width();
+  int dst_height = dst_frame->height();
   // LibYuv expects pre-rotation values for dst.
   // Stride values should correspond to the destination values.
   if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
-    std::swap(dst_width, dst_height);
+    dst_width = dst_frame->height();
+    dst_height = dst_frame->width();
   }
   return libyuv::ConvertToI420(
       src_frame, sample_size,
-      dst_buffer->MutableDataY(), dst_buffer->StrideY(),
-      dst_buffer->MutableDataU(), dst_buffer->StrideU(),
-      dst_buffer->MutableDataV(), dst_buffer->StrideV(),
+      dst_frame->video_frame_buffer()->MutableDataY(),
+      dst_frame->video_frame_buffer()->StrideY(),
+      dst_frame->video_frame_buffer()->MutableDataU(),
+      dst_frame->video_frame_buffer()->StrideU(),
+      dst_frame->video_frame_buffer()->MutableDataV(),
+      dst_frame->video_frame_buffer()->StrideV(),
      crop_x, crop_y,
      src_width, src_height,
      dst_width, dst_height,

View File

@@ -87,18 +87,10 @@ void VideoFrame::CreateFrame(const uint8_t* buffer_y,
   const int expected_size_y = height * stride_y;
   const int expected_size_u = half_height * stride_u;
   const int expected_size_v = half_height * stride_v;
-  // Allocate a new buffer.
-  rtc::scoped_refptr<I420Buffer> buffer_ =
-      I420Buffer::Create(width, height, stride_y, stride_u, stride_v);
-
-  memcpy(buffer_->MutableDataY(), buffer_y, expected_size_y);
-  memcpy(buffer_->MutableDataU(), buffer_u, expected_size_u);
-  memcpy(buffer_->MutableDataV(), buffer_v, expected_size_v);
-
-  video_frame_buffer_ = buffer_;
-  timestamp_rtp_ = 0;
-  ntp_time_ms_ = 0;
-  timestamp_us_ = 0;
+  CreateEmptyFrame(width, height, stride_y, stride_u, stride_v);
+  memcpy(video_frame_buffer_->MutableDataY(), buffer_y, expected_size_y);
+  memcpy(video_frame_buffer_->MutableDataU(), buffer_u, expected_size_u);
+  memcpy(video_frame_buffer_->MutableDataV(), buffer_v, expected_size_v);
   rotation_ = rotation;
 }

View File

@@ -31,6 +31,19 @@ int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {

 }  // namespace

+uint8_t* VideoFrameBuffer::MutableDataY() {
+  RTC_NOTREACHED();
+  return nullptr;
+}
+uint8_t* VideoFrameBuffer::MutableDataU() {
+  RTC_NOTREACHED();
+  return nullptr;
+}
+uint8_t* VideoFrameBuffer::MutableDataV() {
+  RTC_NOTREACHED();
+  return nullptr;
+}
+
 VideoFrameBuffer::~VideoFrameBuffer() {}

 I420Buffer::I420Buffer(int width, int height)

View File

@@ -453,6 +453,7 @@ class VideoFrameTest : public testing::Test {
   static bool IsEqual(const cricket::VideoFrame& frame,
                       int width,
                       int height,
+                      int64_t timestamp_us,
                       const uint8_t* y,
                       uint32_t ypitch,
                       const uint8_t* u,

@@ -461,6 +462,7 @@ class VideoFrameTest : public testing::Test {
                       uint32_t vpitch,
                       int max_error) {
     return IsSize(frame, width, height) &&
+           frame.timestamp_us() == timestamp_us &&
            IsPlaneEqual("y", frame.video_frame_buffer()->DataY(),
                         frame.video_frame_buffer()->StrideY(), y, ypitch,
                         static_cast<uint32_t>(width),

@@ -478,25 +480,15 @@ class VideoFrameTest : public testing::Test {
   static bool IsEqual(const cricket::VideoFrame& frame1,
                       const cricket::VideoFrame& frame2,
                       int max_error) {
-    return frame1.timestamp_us() == frame2.timestamp_us() &&
-           IsEqual(frame1,
+    return IsEqual(frame1,
                    frame2.width(), frame2.height(),
+                   frame2.timestamp_us(),
                    frame2.video_frame_buffer()->DataY(),
                    frame2.video_frame_buffer()->StrideY(),
                    frame2.video_frame_buffer()->DataU(),
                    frame2.video_frame_buffer()->StrideU(),
                    frame2.video_frame_buffer()->DataV(),
-                   frame2.video_frame_buffer()->StrideV(), max_error);
-  }
-
-  static bool IsEqual(
-      const cricket::VideoFrame& frame1,
-      const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
-      int max_error) {
-    return IsEqual(frame1, buffer->width(), buffer->height(),
-                   buffer->DataY(), buffer->StrideY(),
-                   buffer->DataU(), buffer->StrideU(),
-                   buffer->DataV(), buffer->StrideV(),
+                   frame2.video_frame_buffer()->StrideV(),
                    max_error);
   }

@@ -505,10 +497,10 @@ class VideoFrameTest : public testing::Test {
                       int hcrop, int vcrop, int max_error) {
     return frame1.width() <= frame2.width() &&
            frame1.height() <= frame2.height() &&
-           frame1.timestamp_us() == frame2.timestamp_us() &&
            IsEqual(frame1,
                    frame2.width() - hcrop * 2,
                    frame2.height() - vcrop * 2,
+                   frame2.timestamp_us(),
                    frame2.video_frame_buffer()->DataY()
                        + vcrop * frame2.video_frame_buffer()->StrideY()
                        + hcrop,

@@ -547,8 +539,8 @@ class VideoFrameTest : public testing::Test {
     const uint8_t* y = reinterpret_cast<uint8_t*>(ms.get()->GetBuffer());
     const uint8_t* u = y + kWidth * kHeight;
     const uint8_t* v = u + kWidth * kHeight / 4;
-    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, y, kWidth, u, kWidth / 2, v,
-                        kWidth / 2, 0));
+    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 0, y, kWidth, u,
+                        kWidth / 2, v, kWidth / 2, 0));
   }

   // Test constructing an image from a YV12 buffer.

@@ -562,8 +554,8 @@ class VideoFrameTest : public testing::Test {
     const uint8_t* y = reinterpret_cast<uint8_t*>(ms.get()->GetBuffer());
     const uint8_t* v = y + kWidth * kHeight;
     const uint8_t* u = v + kWidth * kHeight / 4;
-    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, y, kWidth, u, kWidth / 2, v,
-                        kWidth / 2, 0));
+    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 0, y, kWidth, u,
+                        kWidth / 2, v, kWidth / 2, 0));
   }

   // Test constructing an image from a I422 buffer.

@@ -780,8 +772,7 @@ class VideoFrameTest : public testing::Test {
   // Macro to help test different rotations
 #define TEST_MIRROR(FOURCC, BPP) \
   void Construct##FOURCC##Mirror() { \
-    T frame1, frame2; \
-    rtc::scoped_refptr<webrtc::I420Buffer> res_buffer; \
+    T frame1, frame2, frame3; \
    std::unique_ptr<rtc::MemoryStream> ms( \
        CreateYuvSample(kWidth, kHeight, BPP)); \
    ASSERT_TRUE(ms.get() != NULL); \

@@ -797,18 +788,21 @@ class VideoFrameTest : public testing::Test {
                        data_size, 0, webrtc::kVideoRotation_0)); \
    int width_rotate = frame1.width(); \
    int height_rotate = frame1.height(); \
-    res_buffer = webrtc::I420Buffer::Create(width_rotate, height_rotate); \
+    frame3.InitToEmptyBuffer(width_rotate, height_rotate); \
    libyuv::I420Mirror(frame2.video_frame_buffer()->DataY(), \
                       frame2.video_frame_buffer()->StrideY(), \
                       frame2.video_frame_buffer()->DataU(), \
                       frame2.video_frame_buffer()->StrideU(), \
                       frame2.video_frame_buffer()->DataV(), \
                       frame2.video_frame_buffer()->StrideV(), \
-                       res_buffer->MutableDataY(), res_buffer->StrideY(), \
-                       res_buffer->MutableDataU(), res_buffer->StrideU(), \
-                       res_buffer->MutableDataV(), res_buffer->StrideV(), \
-                       kWidth, kHeight); \
-    EXPECT_TRUE(IsEqual(frame1, res_buffer, 0)); \
+                       frame3.video_frame_buffer()->MutableDataY(), \
+                       frame3.video_frame_buffer()->StrideY(), \
+                       frame3.video_frame_buffer()->MutableDataU(), \
+                       frame3.video_frame_buffer()->StrideU(), \
+                       frame3.video_frame_buffer()->MutableDataV(), \
+                       frame3.video_frame_buffer()->StrideV(), kWidth, \
+                       kHeight); \
+    EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \
   }

   TEST_MIRROR(I420, 420)

@@ -816,8 +810,7 @@ class VideoFrameTest : public testing::Test {
   // Macro to help test different rotations
 #define TEST_ROTATE(FOURCC, BPP, ROTATE) \
   void Construct##FOURCC##Rotate##ROTATE() { \
-    T frame1, frame2; \
-    rtc::scoped_refptr<webrtc::I420Buffer> res_buffer; \
+    T frame1, frame2, frame3; \
    std::unique_ptr<rtc::MemoryStream> ms( \
        CreateYuvSample(kWidth, kHeight, BPP)); \
    ASSERT_TRUE(ms.get() != NULL); \

@@ -833,18 +826,21 @@ class VideoFrameTest : public testing::Test {
                        data_size, 0, webrtc::kVideoRotation_0)); \
    int width_rotate = frame1.width(); \
    int height_rotate = frame1.height(); \
-    res_buffer = webrtc::I420Buffer::Create(width_rotate, height_rotate); \
+    frame3.InitToEmptyBuffer(width_rotate, height_rotate); \
    libyuv::I420Rotate(frame2.video_frame_buffer()->DataY(), \
                       frame2.video_frame_buffer()->StrideY(), \
                       frame2.video_frame_buffer()->DataU(), \
                       frame2.video_frame_buffer()->StrideU(), \
                       frame2.video_frame_buffer()->DataV(), \
                       frame2.video_frame_buffer()->StrideV(), \
-                       res_buffer->MutableDataY(), res_buffer->StrideY(), \
-                       res_buffer->MutableDataU(), res_buffer->StrideU(), \
-                       res_buffer->MutableDataV(), res_buffer->StrideV(), \
-                       kWidth, kHeight, libyuv::kRotate##ROTATE); \
-    EXPECT_TRUE(IsEqual(frame1, res_buffer, 0)); \
+                       frame3.video_frame_buffer()->MutableDataY(), \
+                       frame3.video_frame_buffer()->StrideY(), \
+                       frame3.video_frame_buffer()->MutableDataU(), \
+                       frame3.video_frame_buffer()->StrideU(), \
+                       frame3.video_frame_buffer()->MutableDataV(), \
+                       frame3.video_frame_buffer()->StrideV(), kWidth, \
+                       kHeight, libyuv::kRotate##ROTATE); \
+    EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \
   }

   // Test constructing an image with rotation.

@@ -948,7 +944,7 @@ class VideoFrameTest : public testing::Test {
     const uint8_t* y = pixel;
     const uint8_t* u = y + 1;
     const uint8_t* v = u + 1;
-    EXPECT_TRUE(IsEqual(frame, 1, 1, y, 1, u, 1, v, 1, 0));
+    EXPECT_TRUE(IsEqual(frame, 1, 1, 0, y, 1, u, 1, v, 1, 0));
   }

   // Test 5 pixel edge case image.

View File

@@ -66,13 +66,17 @@ void VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec) {
       cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
 }

-static rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateBlackFrameBuffer(
-    int width,
-    int height) {
-  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
-      webrtc::I420Buffer::Create(width, height);
-  buffer->SetToBlack();
-  return buffer;
+static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
+                             int width,
+                             int height) {
+  video_frame->CreateEmptyFrame(
+      width, height, width, (width + 1) / 2, (width + 1) / 2);
+  memset(video_frame->video_frame_buffer()->MutableDataY(), 16,
+         video_frame->allocated_size(webrtc::kYPlane));
+  memset(video_frame->video_frame_buffer()->MutableDataU(), 128,
+         video_frame->allocated_size(webrtc::kUPlane));
+  memset(video_frame->video_frame_buffer()->MutableDataV(), 128,
+         video_frame->allocated_size(webrtc::kVPlane));
 }

 void VerifySendStreamHasRtxTypes(const webrtc::VideoSendStream::Config& config,

@@ -2200,9 +2204,9 @@ TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeCorrectly) {
   cricket::FakeVideoRenderer renderer;
   EXPECT_TRUE(channel_->SetSink(last_ssrc_, &renderer));

-  webrtc::VideoFrame video_frame(CreateBlackFrameBuffer(4, 4),
-                                 kInitialTimestamp, 0,
-                                 webrtc::kVideoRotation_0);
+  webrtc::VideoFrame video_frame;
+  CreateBlackFrame(&video_frame, 4, 4);
+  video_frame.set_timestamp(kInitialTimestamp);
   // Initial NTP time is not available on the first frame, but should still be
   // able to be estimated.
   stream->InjectFrame(video_frame);

View File

@@ -129,9 +129,7 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
     new_height = dw;
   }

-  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
-      webrtc::I420Buffer::Create(new_width, new_height);
-  video_frame_buffer_ = buffer;
+  InitToEmptyBuffer(new_width, new_height);
   rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation;

   int horiz_crop = ((w - dw) / 2) & ~1;

@@ -142,10 +140,15 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
   int idh = (h < 0) ? -dh : dh;
   int r = libyuv::ConvertToI420(
       sample, sample_size,
-      buffer->MutableDataY(), buffer->StrideY(),
-      buffer->MutableDataU(), buffer->StrideU(),
-      buffer->MutableDataV(), buffer->StrideV(),
-      horiz_crop, vert_crop, w, h, dw, idh,
+      video_frame_buffer_->MutableDataY(),
+      video_frame_buffer_->StrideY(),
+      video_frame_buffer_->MutableDataU(),
+      video_frame_buffer_->StrideU(),
+      video_frame_buffer_->MutableDataV(),
+      video_frame_buffer_->StrideV(),
+      horiz_crop, vert_crop,
+      w, h,
+      dw, idh,
       static_cast<libyuv::RotationMode>(
           apply_rotation ? rotation : webrtc::kVideoRotation_0),
       format);

@@ -159,7 +162,7 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
 }

 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h) {
-  video_frame_buffer_ = webrtc::I420Buffer::Create(w, h);
+  video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
   rotation_ = webrtc::kVideoRotation_0;
 }

View File

@@ -420,19 +420,15 @@ class VideoCaptureExternalTest : public testing::Test {
     capability.maxFPS = kTestFramerate;
     capture_callback_.SetExpectedCapability(capability);

-    rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
-        kTestWidth, kTestHeight,
-        kTestWidth, ((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
-    memset(buffer->MutableDataY(), 127, kTestWidth * kTestHeight);
-    memset(buffer->MutableDataU(), 127,
-           ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
-    memset(buffer->MutableDataV(), 127,
-           ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
-    test_frame_.reset(
-        new webrtc::VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0));
+    test_frame_.CreateEmptyFrame(kTestWidth, kTestHeight, kTestWidth,
+                                 ((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
     SleepMs(1); // Wait 1ms so that two tests can't have the same timestamp.
+    memset(test_frame_.video_frame_buffer()->MutableDataY(), 127,
+           kTestWidth * kTestHeight);
+    memset(test_frame_.video_frame_buffer()->MutableDataU(), 127,
+           ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
+    memset(test_frame_.video_frame_buffer()->MutableDataV(), 127,
+           ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));

     capture_module_->RegisterCaptureDataCallback(capture_callback_);
     capture_module_->RegisterCaptureCallback(capture_feedback_);

@@ -447,7 +443,7 @@ class VideoCaptureExternalTest : public testing::Test {
   webrtc::VideoCaptureExternal* capture_input_interface_;
   rtc::scoped_refptr<VideoCaptureModule> capture_module_;
   std::unique_ptr<webrtc::ProcessThread> process_module_;
-  std::unique_ptr<webrtc::VideoFrame> test_frame_;
+  webrtc::VideoFrame test_frame_;
   TestVideoCaptureCallback capture_callback_;
   TestVideoCaptureFeedBack capture_feedback_;
 };

@@ -455,13 +451,13 @@ class VideoCaptureExternalTest : public testing::Test {
 // Test input of external video frames.
 TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
   size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                         test_frame_->width(),
-                                         test_frame_->height());
+                                         test_frame_.width(),
+                                         test_frame_.height());
   std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-  webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
+  webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
   EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
       length, capture_callback_.capability(), 0));
-  EXPECT_TRUE(capture_callback_.CompareLastFrame(*test_frame_));
+  EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
 }

 // Test frame rate and no picture alarm.

@@ -477,13 +473,12 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
   while ((rtc::TimeNanos() - startTime) < testTime) {
     size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                           test_frame_->width(),
-                                           test_frame_->height());
+                                           test_frame_.width(),
+                                           test_frame_.height());
     std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-    webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
-    EXPECT_EQ(
-        0, capture_input_interface_->IncomingFrame(
-               test_buffer.get(), length, capture_callback_.capability(), 0));
+    webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
+    EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
+        length, capture_callback_.capability(), 0));
     SleepMs(100);
   }
   EXPECT_TRUE(capture_feedback_.frame_rate() >= 8 &&

@@ -494,10 +489,10 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
   startTime = rtc::TimeNanos();
   while ((rtc::TimeNanos() - startTime) < testTime) {
     size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                           test_frame_->width(),
-                                           test_frame_->height());
+                                           test_frame_.width(),
+                                           test_frame_.height());
     std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-    webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
+    webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
     EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
         length, capture_callback_.capability(), 0));
     SleepMs(1000 / 30);

@@ -512,10 +507,10 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
 TEST_F(VideoCaptureExternalTest, Rotation) {
   EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_0));
   size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                         test_frame_->width(),
-                                         test_frame_->height());
+                                         test_frame_.width(),
+                                         test_frame_.height());
   std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-  webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
+  webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
   EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
       length, capture_callback_.capability(), 0));
   EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_90));

View File

@@ -275,14 +275,14 @@ int32_t VideoCaptureImpl::IncomingFrame(
         // Setting absolute height (in case it was negative).
         // In Windows, the image starts bottom left, instead of top left.
         // Setting a negative source height, inverts the image (within LibYuv).
-
-        // TODO(nisse): Use a pool?
-        rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
-            target_width, abs(target_height), stride_y, stride_uv, stride_uv);
+        _captureFrame.CreateEmptyFrame(target_width,
+                                       abs(target_height),
+                                       stride_y,
+                                       stride_uv, stride_uv);
         const int conversionResult = ConvertToI420(
             commonVideoType, videoFrame, 0, 0,  // No cropping
             width, height, videoFrameLength,
-            apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
+            apply_rotation ? _rotateFrame : kVideoRotation_0, &_captureFrame);
         if (conversionResult < 0)
         {
             LOG(LS_ERROR) << "Failed to convert capture frame from type "

@@ -290,12 +290,15 @@ int32_t VideoCaptureImpl::IncomingFrame(
             return -1;
         }

-        VideoFrame captureFrame(
-            buffer, 0, rtc::TimeMillis(),
-            !apply_rotation ? _rotateFrame : kVideoRotation_0);
-        captureFrame.set_ntp_time_ms(captureTime);
+        if (!apply_rotation) {
+          _captureFrame.set_rotation(_rotateFrame);
+        } else {
+          _captureFrame.set_rotation(kVideoRotation_0);
+        }
+        _captureFrame.set_ntp_time_ms(captureTime);
+        _captureFrame.set_render_time_ms(rtc::TimeMillis());

-        DeliverCapturedFrame(captureFrame);
+        DeliverCapturedFrame(_captureFrame);
     }
     else // Encoded format
     {

View File

@@ -137,6 +137,8 @@ private:
     VideoRotation _rotateFrame;  // Set if the frame should be rotated by the
                                  // capture module.

+    VideoFrame _captureFrame;
+
     // Indicate whether rotation should be applied before delivered externally.
     bool apply_rotation_;
 };

View File

@@ -121,46 +121,51 @@ int H264DecoderImpl::AVGetBuffer2(
     return ret;
   }

-  // The video frame is stored in |frame_buffer|. |av_frame| is FFmpeg's version
-  // of a video frame and will be set up to reference |frame_buffer|'s data.
+  // The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
+  // of a video frame and will be set up to reference |video_frame|'s buffers.
+
+  // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
+  // Refactor to do not use a VideoFrame object at all.

   // FFmpeg expects the initial allocation to be zero-initialized according to
   // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
-  // TODO(nisse): Delete that feature from the video pool, instead add
-  // an explicit call to InitializeData here.
-  rtc::scoped_refptr<I420Buffer> frame_buffer =
-      decoder->pool_.CreateBuffer(width, height);
-
-  int y_size = width * height;
-  int uv_size = ((width + 1) / 2) * ((height + 1) / 2);
+  VideoFrame* video_frame = new VideoFrame(
+      decoder->pool_.CreateBuffer(width, height),
+      0 /* timestamp */, 0 /* render_time_ms */, kVideoRotation_0);

   // DCHECK that we have a continuous buffer as is required.
-  RTC_DCHECK_EQ(frame_buffer->DataU(), frame_buffer->DataY() + y_size);
-  RTC_DCHECK_EQ(frame_buffer->DataV(), frame_buffer->DataU() + uv_size);
-  int total_size = y_size + 2 * uv_size;
+  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
+                video_frame->video_frame_buffer()->DataY() +
+                video_frame->allocated_size(kYPlane));
+  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataV(),
+                video_frame->video_frame_buffer()->DataU() +
+                video_frame->allocated_size(kUPlane));
+  int total_size = video_frame->allocated_size(kYPlane) +
+                   video_frame->allocated_size(kUPlane) +
+                   video_frame->allocated_size(kVPlane);

   av_frame->format = context->pix_fmt;
   av_frame->reordered_opaque = context->reordered_opaque;

   // Set |av_frame| members as required by FFmpeg.
-  av_frame->data[kYPlaneIndex] = frame_buffer->MutableDataY();
-  av_frame->linesize[kYPlaneIndex] = frame_buffer->StrideY();
-  av_frame->data[kUPlaneIndex] = frame_buffer->MutableDataU();
-  av_frame->linesize[kUPlaneIndex] = frame_buffer->StrideU();
-  av_frame->data[kVPlaneIndex] = frame_buffer->MutableDataV();
-  av_frame->linesize[kVPlaneIndex] = frame_buffer->StrideV();
+  av_frame->data[kYPlaneIndex] =
+      video_frame->video_frame_buffer()->MutableDataY();
+  av_frame->linesize[kYPlaneIndex] =
+      video_frame->video_frame_buffer()->StrideY();
+  av_frame->data[kUPlaneIndex] =
+      video_frame->video_frame_buffer()->MutableDataU();
+  av_frame->linesize[kUPlaneIndex] =
+      video_frame->video_frame_buffer()->StrideU();
+  av_frame->data[kVPlaneIndex] =
+      video_frame->video_frame_buffer()->MutableDataV();
+  av_frame->linesize[kVPlaneIndex] =
+      video_frame->video_frame_buffer()->StrideV();
   RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);

-  // Create a VideoFrame object, to keep a reference to the buffer.
-  // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
-  // Refactor to do not use a VideoFrame object at all.
-  av_frame->buf[0] = av_buffer_create(
-      av_frame->data[kYPlaneIndex],
-      total_size,
-      AVFreeBuffer2,
-      static_cast<void*>(new VideoFrame(frame_buffer,
-                                        0 /* timestamp */,
-                                        0 /* render_time_ms */,
-                                        kVideoRotation_0)),
-      0);
+  av_frame->buf[0] = av_buffer_create(av_frame->data[kYPlaneIndex],
+                                      total_size,
+                                      AVFreeBuffer2,
+                                      static_cast<void*>(video_frame),
+                                      0);
   RTC_CHECK(av_frame->buf[0]);
   return 0;

View File

@@ -137,7 +137,8 @@ int I420Encoder::RegisterEncodeCompleteCallback(
 }

 I420Decoder::I420Decoder()
-    : _width(0),
+    : _decodedImage(),
+      _width(0),
       _height(0),
       _inited(false),
       _decodeCompleteCallback(NULL) {}

@@ -198,19 +199,17 @@ int I420Decoder::Decode(const EncodedImage& inputImage,
   }
   // Set decoded image parameters.
   int half_width = (_width + 1) / 2;
-  rtc::scoped_refptr<webrtc::I420Buffer> frame_buffer =
-      I420Buffer::Create(_width, _height, _width, half_width, half_width);
-
-  // Converting from raw buffer I420Buffer.
+  _decodedImage.CreateEmptyFrame(_width, _height, _width, half_width,
+                                 half_width);
+  // Converting from buffer to plane representation.
   int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
-                          kVideoRotation_0, frame_buffer.get());
+                          kVideoRotation_0, &_decodedImage);
   if (ret < 0) {
     return WEBRTC_VIDEO_CODEC_MEMORY;
   }
-
-  VideoFrame decoded_image(frame_buffer, inputImage._timeStamp, 0,
-                           webrtc::kVideoRotation_0);
-  _decodeCompleteCallback->Decoded(decoded_image);
+  _decodedImage.set_timestamp(inputImage._timeStamp);

+  _decodeCompleteCallback->Decoded(_decodedImage);
   return WEBRTC_VIDEO_CODEC_OK;
 }

View File

@@ -298,9 +298,10 @@ int SimulcastEncoderAdapter::Encode(
         return ret;
       }
     } else {
+      VideoFrame dst_frame;
+      // Making sure that destination frame is of sufficient size.
       // Aligning stride values based on width.
-      rtc::scoped_refptr<I420Buffer> dst_buffer =
-          I420Buffer::Create(dst_width, dst_height, dst_width,
+      dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
                              (dst_width + 1) / 2, (dst_width + 1) / 2);
       libyuv::I420Scale(input_image.video_frame_buffer()->DataY(),
                         input_image.video_frame_buffer()->StrideY(),

@@ -309,16 +310,18 @@ int SimulcastEncoderAdapter::Encode(
                         input_image.video_frame_buffer()->DataV(),
                         input_image.video_frame_buffer()->StrideV(),
                         src_width, src_height,
-                        dst_buffer->MutableDataY(), dst_buffer->StrideY(),
-                        dst_buffer->MutableDataU(), dst_buffer->StrideU(),
-                        dst_buffer->MutableDataV(), dst_buffer->StrideV(),
+                        dst_frame.video_frame_buffer()->MutableDataY(),
+                        dst_frame.video_frame_buffer()->StrideY(),
+                        dst_frame.video_frame_buffer()->MutableDataU(),
+                        dst_frame.video_frame_buffer()->StrideU(),
+                        dst_frame.video_frame_buffer()->MutableDataV(),
+                        dst_frame.video_frame_buffer()->StrideV(),
                         dst_width, dst_height,
                         libyuv::kFilterBilinear);
+      dst_frame.set_timestamp(input_image.timestamp());
+      dst_frame.set_render_time_ms(input_image.render_time_ms());

       int ret = streaminfos_[stream_idx].encoder->Encode(
-          VideoFrame(dst_buffer, input_image.timestamp(),
-                     input_image.render_time_ms(), webrtc::kVideoRotation_0),
-          codec_specific_info, &stream_frame_types);
+          dst_frame, codec_specific_info, &stream_frame_types);
       if (ret != WEBRTC_VIDEO_CODEC_OK) {
         return ret;
       }

View File

@@ -535,11 +535,17 @@ TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) {
       .WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));

   // Send a fake frame and assert the return is software fallback.
+  VideoFrame input_frame;
   int half_width = (kDefaultWidth + 1) / 2;
-  rtc::scoped_refptr<I420Buffer> input_buffer = I420Buffer::Create(
-      kDefaultWidth, kDefaultHeight, kDefaultWidth, half_width, half_width);
-  input_buffer->InitializeData();
-  VideoFrame input_frame(input_buffer, 0, 0, webrtc::kVideoRotation_0);
+  input_frame.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
+                               half_width, half_width);
+  memset(input_frame.video_frame_buffer()->MutableDataY(), 0,
+         input_frame.allocated_size(kYPlane));
+  memset(input_frame.video_frame_buffer()->MutableDataU(), 0,
+         input_frame.allocated_size(kUPlane));
+  memset(input_frame.video_frame_buffer()->MutableDataV(), 0,
+         input_frame.allocated_size(kVPlane));
   std::vector<FrameType> frame_types(3, kVideoFrameKey);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE,
             adapter_->Encode(input_frame, nullptr, &frame_types));

View File

@@ -236,8 +236,8 @@ class TestVp8Simulcast : public ::testing::Test {
     }
   }

-  // Fills in an I420Buffer from |plane_colors|.
-  static void CreateImage(const rtc::scoped_refptr<I420Buffer>& buffer,
+  // Fills in an VideoFrameBuffer from |plane_colors|.
+  static void CreateImage(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
                           int plane_colors[kNumOfPlanes]) {
     int width = buffer->width();
     int height = buffer->height();

@@ -317,11 +317,14 @@ class TestVp8Simulcast : public ::testing::Test {
     EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
     EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
     int half_width = (kDefaultWidth + 1) / 2;
-    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight,
-                                       kDefaultWidth, half_width, half_width);
-    input_buffer_->InitializeData();
-    input_frame_.reset(
-        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
+    input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
+                                  half_width, half_width);
+    memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
+           input_frame_.allocated_size(kYPlane));
+    memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
+           input_frame_.allocated_size(kUPlane));
+    memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
+           input_frame_.allocated_size(kVPlane));
   }

   virtual void TearDown() {

@@ -393,33 +396,33 @@ class TestVp8Simulcast : public ::testing::Test {
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

     ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

     frame_types[0] = kVideoFrameKey;
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

     std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
     frame_types[1] = kVideoFrameKey;
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

     std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
     frame_types[2] = kVideoFrameKey;
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

     std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
     ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }

   void TestPaddingAllStreams() {

@@ -428,11 +431,11 @@ class TestVp8Simulcast : public ::testing::Test {
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 1);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }

   void TestPaddingTwoStreams() {

@@ -441,11 +444,11 @@ class TestVp8Simulcast : public ::testing::Test {
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 1);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }

   void TestPaddingTwoStreamsOneMaxedOut() {

@@ -455,11 +458,11 @@ class TestVp8Simulcast : public ::testing::Test {
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 1);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }

   void TestPaddingOneStream() {

@@ -468,11 +471,11 @@ class TestVp8Simulcast : public ::testing::Test {
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 2);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

     ExpectStreams(kVideoFrameDelta, 2);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }

   void TestPaddingOneStreamTwoMaxedOut() {

@@ -483,11 +486,11 @@ class TestVp8Simulcast : public ::testing::Test {
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 2);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
ExpectStreams(kVideoFrameDelta, 2); ExpectStreams(kVideoFrameDelta, 2);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
} }
void TestSendAllStreams() { void TestSendAllStreams() {
@ -497,11 +500,11 @@ class TestVp8Simulcast : public ::testing::Test {
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
kVideoFrameDelta); kVideoFrameDelta);
ExpectStreams(kVideoFrameKey, 3); ExpectStreams(kVideoFrameKey, 3);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
ExpectStreams(kVideoFrameDelta, 3); ExpectStreams(kVideoFrameDelta, 3);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
} }
void TestDisablingStreams() { void TestDisablingStreams() {
@ -510,47 +513,47 @@ class TestVp8Simulcast : public ::testing::Test {
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
kVideoFrameDelta); kVideoFrameDelta);
ExpectStreams(kVideoFrameKey, 3); ExpectStreams(kVideoFrameKey, 3);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
ExpectStreams(kVideoFrameDelta, 3); ExpectStreams(kVideoFrameDelta, 3);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
// We should only get two streams and padding for one. // We should only get two streams and padding for one.
encoder_->SetRates( encoder_->SetRates(
kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
ExpectStreams(kVideoFrameDelta, 2); ExpectStreams(kVideoFrameDelta, 2);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
// We should only get the first stream and padding for two. // We should only get the first stream and padding for two.
encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30); encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30);
ExpectStreams(kVideoFrameDelta, 1); ExpectStreams(kVideoFrameDelta, 1);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
// We don't have enough bitrate for the thumbnail stream, but we should get // We don't have enough bitrate for the thumbnail stream, but we should get
// it anyway with current configuration. // it anyway with current configuration.
encoder_->SetRates(kTargetBitrates[0] - 1, 30); encoder_->SetRates(kTargetBitrates[0] - 1, 30);
ExpectStreams(kVideoFrameDelta, 1); ExpectStreams(kVideoFrameDelta, 1);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
// We should only get two streams and padding for one. // We should only get two streams and padding for one.
encoder_->SetRates( encoder_->SetRates(
kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
// We get a key frame because a new stream is being enabled. // We get a key frame because a new stream is being enabled.
ExpectStreams(kVideoFrameKey, 2); ExpectStreams(kVideoFrameKey, 2);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
// We should get all three streams. // We should get all three streams.
encoder_->SetRates( encoder_->SetRates(
kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30); kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30);
// We get a key frame because a new stream is being enabled. // We get a key frame because a new stream is being enabled.
ExpectStreams(kVideoFrameKey, 3); ExpectStreams(kVideoFrameKey, 3);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
} }
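
Aside (not part of this CL): TestDisablingStreams() drives which simulcast layers are produced purely through SetRates(). Below is a small self-contained sketch of an allocation rule that is consistent with this test's expectations (allocate target bitrates bottom-up, drop a higher stream once even its minimum no longer fits, always keep stream 0). The helper name EnabledStreams and the kbps values are hypothetical; this illustrates the test's assumptions, not the encoder's actual rate-allocation code:

    #include <cassert>
    #include <vector>

    // Returns how many simulcast streams fit into |available_kbps|, giving each
    // lower stream its target rate and requiring at least the minimum rate
    // before enabling the next higher stream. Stream 0 is always produced.
    int EnabledStreams(int available_kbps,
                       const std::vector<int>& min_kbps,
                       const std::vector<int>& target_kbps) {
      int enabled = 1;  // The base stream is encoded even below its target.
      int remaining = available_kbps - target_kbps[0];
      for (size_t i = 1; i < min_kbps.size(); ++i) {
        if (remaining < min_kbps[i])
          break;
        ++enabled;
        remaining -= target_kbps[i];
      }
      return enabled;
    }

    int main() {
      const std::vector<int> kMin = {50, 150, 600};       // Hypothetical kbps values.
      const std::vector<int> kTarget = {100, 450, 1000};  // Hypothetical kbps values.
      // Mirrors the SetRates() calls above: two targets plus half of the third
      // minimum -> two streams; first target plus half of the second minimum ->
      // one stream; even below the first target -> still one stream.
      assert(EnabledStreams(kTarget[0] + kTarget[1] + kMin[2] / 2, kMin, kTarget) == 2);
      assert(EnabledStreams(kTarget[0] + kMin[1] / 2, kMin, kTarget) == 1);
      assert(EnabledStreams(kTarget[0] - 1, kMin, kTarget) == 1);
      assert(EnabledStreams(kTarget[0] + kTarget[1] + kTarget[2], kMin, kTarget) == 3);
      return 0;
    }
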
void SwitchingToOneStream(int width, int height) { void SwitchingToOneStream(int width, int height) {
@ -568,12 +571,14 @@ class TestVp8Simulcast : public ::testing::Test {
} }
// Setting input image to new resolution. // Setting input image to new resolution.
int half_width = (settings_.width + 1) / 2; int half_width = (settings_.width + 1) / 2;
input_buffer_ = I420Buffer::Create(settings_.width, settings_.height, input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width); settings_.width, half_width, half_width);
input_buffer_->InitializeData(); memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
input_frame_.reset( memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); input_frame_.allocated_size(kUPlane));
memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
// The for loop above did not set the bitrate of the highest layer. // The for loop above did not set the bitrate of the highest layer.
settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1] settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1]
@ -598,7 +603,7 @@ class TestVp8Simulcast : public ::testing::Test {
.Times(1) .Times(1)
.WillRepeatedly(Return( .WillRepeatedly(Return(
EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0)));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
// Switch back. // Switch back.
DefaultSettings(&settings_, kDefaultTemporalLayerProfile); DefaultSettings(&settings_, kDefaultTemporalLayerProfile);
@ -609,12 +614,15 @@ class TestVp8Simulcast : public ::testing::Test {
ExpectStreams(kVideoFrameKey, 1); ExpectStreams(kVideoFrameKey, 1);
// Resize |input_frame_| to the new resolution. // Resize |input_frame_| to the new resolution.
half_width = (settings_.width + 1) / 2; half_width = (settings_.width + 1) / 2;
input_buffer_ = I420Buffer::Create(settings_.width, settings_.height, input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width); settings_.width, half_width, half_width);
input_buffer_->InitializeData(); memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.reset( input_frame_.allocated_size(kYPlane));
new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); input_frame_.allocated_size(kUPlane));
memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
} }
void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); } void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); }
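
Aside (not part of this CL): SwitchingToOneStream() recomputes half_width as (settings_.width + 1) / 2 whenever the resolution changes. A self-contained sketch of that rounding rule for I420 chroma planes; the helper name ChromaDim is illustrative:

    #include <cassert>

    // I420 chroma planes cover half the luma resolution in each dimension,
    // rounded up, so odd luma sizes still get a chroma sample at the edge.
    int ChromaDim(int luma_dim) {
      return (luma_dim + 1) / 2;  // Integer ceiling of luma_dim / 2.
    }

    int main() {
      assert(ChromaDim(352) == 176);  // Even width: exact half.
      assert(ChromaDim(353) == 177);  // Odd width: rounds up.
      return 0;
    }
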
@ -629,7 +637,7 @@ class TestVp8Simulcast : public ::testing::Test {
encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams.
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
int picture_id = -1; int picture_id = -1;
int temporal_layer = -1; int temporal_layer = -1;
bool layer_sync = false; bool layer_sync = false;
@ -639,22 +647,22 @@ class TestVp8Simulcast : public ::testing::Test {
EXPECT_TRUE(layer_sync); EXPECT_TRUE(layer_sync);
int key_frame_picture_id = picture_id; int key_frame_picture_id = picture_id;
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer, encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, 0); &layer_sync, 0);
EXPECT_EQ(2, temporal_layer); EXPECT_EQ(2, temporal_layer);
EXPECT_TRUE(layer_sync); EXPECT_TRUE(layer_sync);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer, encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, 0); &layer_sync, 0);
EXPECT_EQ(1, temporal_layer); EXPECT_EQ(1, temporal_layer);
EXPECT_TRUE(layer_sync); EXPECT_TRUE(layer_sync);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer, encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, 0); &layer_sync, 0);
EXPECT_EQ(2, temporal_layer); EXPECT_EQ(2, temporal_layer);
@ -667,8 +675,8 @@ class TestVp8Simulcast : public ::testing::Test {
// Must match last key frame to trigger. // Must match last key frame to trigger.
codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id; codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id;
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer, encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, 0); &layer_sync, 0);
@ -678,8 +686,8 @@ class TestVp8Simulcast : public ::testing::Test {
// Must match last key frame to trigger, test bad id. // Must match last key frame to trigger, test bad id.
codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id + 17; codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id + 17;
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer, encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, 0); &layer_sync, 0);
@ -703,9 +711,9 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY; plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU; plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV; plane_offset[kVPlane] = kColorV;
CreateImage(input_buffer_, plane_offset); CreateImage(input_frame_.video_frame_buffer(), plane_offset);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
int picture_id = -1; int picture_id = -1;
int temporal_layer = -1; int temporal_layer = -1;
bool layer_sync = false; bool layer_sync = false;
@ -719,27 +727,27 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1; plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1; plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1; plane_offset[kVPlane] += 1;
CreateImage(input_buffer_, plane_offset); CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
// Change color. // Change color.
plane_offset[kYPlane] += 1; plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1; plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1; plane_offset[kVPlane] += 1;
CreateImage(input_buffer_, plane_offset); CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
// Change color. // Change color.
plane_offset[kYPlane] += 1; plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1; plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1; plane_offset[kVPlane] += 1;
CreateImage(input_buffer_, plane_offset); CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
CodecSpecificInfo codec_specific; CodecSpecificInfo codec_specific;
codec_specific.codecType = kVideoCodecVP8; codec_specific.codecType = kVideoCodecVP8;
@ -751,10 +759,10 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY; plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU; plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV; plane_offset[kVPlane] = kColorV;
CreateImage(input_buffer_, plane_offset); CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
EncodedImage encoded_frame; EncodedImage encoded_frame;
encoder_callback.GetLastEncodedKeyFrame(&encoded_frame); encoder_callback.GetLastEncodedKeyFrame(&encoded_frame);
@ -776,47 +784,47 @@ class TestVp8Simulcast : public ::testing::Test {
bool expected_layer_sync[3] = {false, false, false}; bool expected_layer_sync[3] = {false, false, false};
// First frame: #0. // First frame: #0.
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx); SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, true, expected_layer_sync); SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3); &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #1. // Next frame: #1.
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx); SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, true, expected_layer_sync); SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3); &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #2. // Next frame: #2.
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(1, 1, 1, expected_temporal_idx); SetExpectedValues3<int>(1, 1, 1, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, true, expected_layer_sync); SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3); &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #3. // Next frame: #3.
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx); SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync); SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3); &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #4. // Next frame: #4.
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx); SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync); SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3); &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #5. // Next frame: #5.
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx); SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync); SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
@ -845,47 +853,47 @@ class TestVp8Simulcast : public ::testing::Test {
bool expected_layer_sync[3] = {false, false, false}; bool expected_layer_sync[3] = {false, false, false};
// First frame: #0. // First frame: #0.
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx); SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, false, expected_layer_sync); SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3); &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #1. // Next frame: #1.
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx); SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, false, expected_layer_sync); SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3); &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #2. // Next frame: #2.
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(1, 0, 255, expected_temporal_idx); SetExpectedValues3<int>(1, 0, 255, expected_temporal_idx);
SetExpectedValues3<bool>(true, false, false, expected_layer_sync); SetExpectedValues3<bool>(true, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3); &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #3. // Next frame: #3.
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx); SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync); SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3); &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #4. // Next frame: #4.
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx); SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync); SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3); &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #5. // Next frame: #5.
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx); SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync); SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers( VerifyTemporalIdxAndSyncForAllSpatialLayers(
@ -903,27 +911,24 @@ class TestVp8Simulcast : public ::testing::Test {
// 1. stride > width 2. stride_y != stride_uv/2 // 1. stride > width 2. stride_y != stride_uv/2
int stride_y = kDefaultWidth + 20; int stride_y = kDefaultWidth + 20;
int stride_uv = ((kDefaultWidth + 1) / 2) + 5; int stride_uv = ((kDefaultWidth + 1) / 2) + 5;
input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y, input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, stride_y,
stride_uv, stride_uv); stride_uv, stride_uv);
input_frame_.reset(
new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
// Set color. // Set color.
int plane_offset[kNumOfPlanes]; int plane_offset[kNumOfPlanes];
plane_offset[kYPlane] = kColorY; plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU; plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV; plane_offset[kVPlane] = kColorV;
CreateImage(input_buffer_, plane_offset); CreateImage(input_frame_.video_frame_buffer(), plane_offset);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
// Change color. // Change color.
plane_offset[kYPlane] += 1; plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1; plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1; plane_offset[kVPlane] += 1;
CreateImage(input_buffer_, plane_offset); CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000); input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
EncodedImage encoded_frame; EncodedImage encoded_frame;
// Only encoding one frame - so will be a key frame. // Only encoding one frame - so will be a key frame.
@ -963,8 +968,7 @@ class TestVp8Simulcast : public ::testing::Test {
std::unique_ptr<VP8Decoder> decoder_; std::unique_ptr<VP8Decoder> decoder_;
MockDecodedImageCallback decoder_callback_; MockDecodedImageCallback decoder_callback_;
VideoCodec settings_; VideoCodec settings_;
rtc::scoped_refptr<I420Buffer> input_buffer_; VideoFrame input_frame_;
std::unique_ptr<VideoFrame> input_frame_;
}; };
} // namespace testing } // namespace testing
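
Aside (not part of this CL): most of the churn in this file is the switch between two ways of preparing input_frame_. A sketch of both patterns, lifted from the hunks above; the WebRTC headers these tests already include are assumed, so this is illustrative rather than a standalone translation unit:

    #include <cstring>
    #include <memory>

    // Pre-revert pattern: allocate an I420Buffer explicitly, zero its planes,
    // and wrap it in a VideoFrame (mutable plane access lives on I420Buffer).
    void SetUpInputWithI420Buffer(rtc::scoped_refptr<webrtc::I420Buffer>* buffer,
                                  std::unique_ptr<webrtc::VideoFrame>* frame,
                                  int width, int height,
                                  int stride_y, int stride_u, int stride_v) {
      *buffer = webrtc::I420Buffer::Create(width, height,
                                           stride_y, stride_u, stride_v);
      (*buffer)->InitializeData();  // Clear so memory checkers allow reads.
      frame->reset(
          new webrtc::VideoFrame(*buffer, 0, 0, webrtc::kVideoRotation_0));
    }

    // Post-revert pattern: the frame allocates its own buffer and the planes
    // are cleared through the mutable accessors on the generic frame buffer.
    void SetUpInputWithCreateEmptyFrame(webrtc::VideoFrame* frame,
                                        int width, int height,
                                        int stride_y, int stride_u, int stride_v) {
      frame->CreateEmptyFrame(width, height, stride_y, stride_u, stride_v);
      memset(frame->video_frame_buffer()->MutableDataY(), 0,
             frame->allocated_size(webrtc::kYPlane));
      memset(frame->video_frame_buffer()->MutableDataU(), 0,
             frame->allocated_size(webrtc::kUPlane));
      memset(frame->video_frame_buffer()->MutableDataV(), 0,
             frame->allocated_size(webrtc::kVPlane));
    }
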

View File

@ -147,15 +147,13 @@ class TestVp8Impl : public ::testing::Test {
EXPECT_EQ(stride_y, 176); EXPECT_EQ(stride_y, 176);
EXPECT_EQ(stride_uv, 96); EXPECT_EQ(stride_uv, 96);
rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create( input_frame_.CreateEmptyFrame(codec_inst_.width, codec_inst_.height,
codec_inst_.width, codec_inst_.height, stride_y, stride_uv, stride_uv); stride_y, stride_uv, stride_uv);
input_frame_.set_timestamp(kTestTimestamp);
// Using ConvertToI420 to add stride to the image. // Using ConvertToI420 to add stride to the image.
EXPECT_EQ( EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
0, ConvertToI420(kI420, source_buffer_.get(), 0, 0, codec_inst_.width, codec_inst_.width, codec_inst_.height, 0,
codec_inst_.height, 0, kVideoRotation_0, kVideoRotation_0, &input_frame_));
buffer.get()));
input_frame_.reset(
new VideoFrame(buffer, kTestTimestamp, 0, webrtc::kVideoRotation_0));
} }
void SetUpEncodeDecode() { void SetUpEncodeDecode() {
@ -197,7 +195,7 @@ class TestVp8Impl : public ::testing::Test {
std::unique_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_; std::unique_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_;
std::unique_ptr<uint8_t[]> source_buffer_; std::unique_ptr<uint8_t[]> source_buffer_;
FILE* source_file_; FILE* source_file_;
std::unique_ptr<VideoFrame> input_frame_; VideoFrame input_frame_;
std::unique_ptr<VideoEncoder> encoder_; std::unique_ptr<VideoEncoder> encoder_;
std::unique_ptr<VideoDecoder> decoder_; std::unique_ptr<VideoDecoder> decoder_;
EncodedImage encoded_frame_; EncodedImage encoded_frame_;
@ -239,7 +237,7 @@ TEST_F(TestVp8Impl, EncoderParameterTest) {
#endif #endif
TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) { TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
SetUpEncodeDecode(); SetUpEncodeDecode();
encoder_->Encode(*input_frame_, NULL, NULL); encoder_->Encode(input_frame_, NULL, NULL);
EXPECT_GT(WaitForEncodedFrame(), 0u); EXPECT_GT(WaitForEncodedFrame(), 0u);
// First frame should be a key frame. // First frame should be a key frame.
encoded_frame_._frameType = kVideoFrameKey; encoded_frame_._frameType = kVideoFrameKey;
@ -248,7 +246,7 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
decoder_->Decode(encoded_frame_, false, NULL)); decoder_->Decode(encoded_frame_, false, NULL));
EXPECT_GT(WaitForDecodedFrame(), 0u); EXPECT_GT(WaitForDecodedFrame(), 0u);
// Compute PSNR on all planes (faster than SSIM). // Compute PSNR on all planes (faster than SSIM).
EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36); EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
EXPECT_EQ(kTestTimestamp, decoded_frame_.timestamp()); EXPECT_EQ(kTestTimestamp, decoded_frame_.timestamp());
EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms()); EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms());
} }
@ -260,7 +258,7 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
#endif #endif
TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) { TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
SetUpEncodeDecode(); SetUpEncodeDecode();
encoder_->Encode(*input_frame_, NULL, NULL); encoder_->Encode(input_frame_, NULL, NULL);
EXPECT_GT(WaitForEncodedFrame(), 0u); EXPECT_GT(WaitForEncodedFrame(), 0u);
// Setting complete to false -> should return an error. // Setting complete to false -> should return an error.
encoded_frame_._completeFrame = false; encoded_frame_._completeFrame = false;
@ -275,7 +273,7 @@ TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
encoded_frame_._frameType = kVideoFrameKey; encoded_frame_._frameType = kVideoFrameKey;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->Decode(encoded_frame_, false, NULL)); decoder_->Decode(encoded_frame_, false, NULL));
EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36); EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
} }
} // namespace webrtc } // namespace webrtc
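
Aside (not part of this CL): the "> 36" expectations above are PSNR thresholds in dB. A self-contained sketch of the standard 8-bit PSNR definition behind such thresholds; exactly how I420PSNR weights the three planes is not shown here, so the helper is illustrative:

    #include <cassert>
    #include <cmath>

    // Peak signal-to-noise ratio for 8-bit samples: 10 * log10(255^2 / MSE).
    double Psnr(double mse) {
      return 10.0 * std::log10(255.0 * 255.0 / mse);
    }

    int main() {
      // A mean squared error of 16 (RMS error of 4 levels) already clears 36 dB.
      assert(Psnr(16.0) > 36.0);
      return 0;
    }
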

View File

@ -1306,18 +1306,18 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
last_frame_width_ = img->d_w; last_frame_width_ = img->d_w;
last_frame_height_ = img->d_h; last_frame_height_ = img->d_h;
// Allocate memory for decoded image. // Allocate memory for decoded image.
rtc::scoped_refptr<I420Buffer> buffer = VideoFrame decoded_image(buffer_pool_.CreateBuffer(img->d_w, img->d_h),
buffer_pool_.CreateBuffer(img->d_w, img->d_h); timestamp, 0, kVideoRotation_0);
libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
buffer->MutableDataY(), buffer->StrideY(), decoded_image.video_frame_buffer()->MutableDataY(),
buffer->MutableDataU(), buffer->StrideU(), decoded_image.video_frame_buffer()->StrideY(),
buffer->MutableDataV(), buffer->StrideV(), decoded_image.video_frame_buffer()->MutableDataU(),
decoded_image.video_frame_buffer()->StrideU(),
decoded_image.video_frame_buffer()->MutableDataV(),
decoded_image.video_frame_buffer()->StrideV(),
img->d_w, img->d_h); img->d_w, img->d_h);
VideoFrame decoded_image(buffer, timestamp, 0, kVideoRotation_0);
decoded_image.set_ntp_time_ms(ntp_time_ms); decoded_image.set_ntp_time_ms(ntp_time_ms);
int ret = decode_complete_callback_->Decoded(decoded_image); int ret = decode_complete_callback_->Decoded(decoded_image);
if (ret != 0) if (ret != 0)
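
Aside (not part of this CL): the plane copy ReturnFrame() performs, pulled out into a helper for reference. The libyuv::I420Copy argument order (source Y/U/V plane and stride pairs, then destination pairs, then width and height) is the one used in the hunk above; the libvpx and libyuv headers already included by this file are assumed, and the helper name is illustrative:

    // Copies the decoded libvpx image into a caller-provided I420 destination.
    void CopyVpxImageToI420(const vpx_image_t* img,
                            uint8_t* dst_y, int dst_stride_y,
                            uint8_t* dst_u, int dst_stride_u,
                            uint8_t* dst_v, int dst_stride_v) {
      libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
                       img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
                       img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
                       dst_y, dst_stride_y,
                       dst_u, dst_stride_u,
                       dst_v, dst_stride_v,
                       img->d_w, img->d_h);
    }
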

View File

@ -148,7 +148,7 @@ int SequenceCoder(webrtc::test::CommandLineParser* parser) {
return -1; return -1;
} }
EXPECT_EQ(0, decoder->InitDecode(&inst, 1)); EXPECT_EQ(0, decoder->InitDecode(&inst, 1));
webrtc::VideoFrame input_frame;
size_t length = webrtc::CalcBufferSize(webrtc::kI420, width, height); size_t length = webrtc::CalcBufferSize(webrtc::kI420, width, height);
std::unique_ptr<uint8_t[]> frame_buffer(new uint8_t[length]); std::unique_ptr<uint8_t[]> frame_buffer(new uint8_t[length]);
@ -163,18 +163,14 @@ int SequenceCoder(webrtc::test::CommandLineParser* parser) {
int64_t starttime = rtc::TimeMillis(); int64_t starttime = rtc::TimeMillis();
int frame_cnt = 1; int frame_cnt = 1;
int frames_processed = 0; int frames_processed = 0;
rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer = input_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
webrtc::I420Buffer::Create(width, height, width, half_width, half_width);
while (!feof(input_file) && while (!feof(input_file) &&
(num_frames == -1 || frames_processed < num_frames)) { (num_frames == -1 || frames_processed < num_frames)) {
if (fread(frame_buffer.get(), 1, length, input_file) != length) if (fread(frame_buffer.get(), 1, length, input_file) != length)
continue; continue;
if (frame_cnt >= start_frame) { if (frame_cnt >= start_frame) {
webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width, webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
height, 0, webrtc::kVideoRotation_0, &i420_buffer); height, 0, webrtc::kVideoRotation_0, &input_frame);
webrtc::VideoFrame input_frame(i420_buffer, 0, 0,
webrtc::kVideoRotation_0);
encoder->Encode(input_frame, NULL, NULL); encoder->Encode(input_frame, NULL, NULL);
decoder->Decode(encoder_callback.encoded_image(), false, NULL); decoder->Decode(encoder_callback.encoded_image(), false, NULL);
++frames_processed; ++frames_processed;
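
Aside (not part of this CL): SequenceCoder() sizes its read buffer with CalcBufferSize(kI420, width, height). A self-contained sketch of the arithmetic that value corresponds to, one luma plane plus two subsampled chroma planes; the exact CalcBufferSize implementation is not shown here, so treat the equivalence as an assumption:

    #include <cassert>
    #include <cstddef>

    // I420 frame size: full-resolution Y plus two half-by-half chroma planes.
    size_t I420FrameSize(int width, int height) {
      const size_t chroma_w = (width + 1) / 2;
      const size_t chroma_h = (height + 1) / 2;
      return static_cast<size_t>(width) * height + 2 * chroma_w * chroma_h;
    }

    int main() {
      // CIF (352x288): 101376 luma bytes + 2 * 25344 chroma bytes.
      assert(I420FrameSize(352, 288) == 152064);
      return 0;
    }
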

View File

@ -141,10 +141,8 @@ TEST_F(VideoProcessingTest, Denoiser) {
while (fread(video_buffer.get(), 1, frame_length_, source_file_) == while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_) { frame_length_) {
// Using ConvertToI420 to add stride to the image. // Using ConvertToI420 to add stride to the image.
rtc::scoped_refptr<webrtc::I420Buffer> input_buffer =
I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, input_buffer.get())); 0, kVideoRotation_0, &video_frame_));
rtc::scoped_refptr<I420Buffer>* p_denoised_c = &denoised_frame_c; rtc::scoped_refptr<I420Buffer>* p_denoised_c = &denoised_frame_c;
rtc::scoped_refptr<I420Buffer>* p_denoised_prev_c = &denoised_frame_prev_c; rtc::scoped_refptr<I420Buffer>* p_denoised_prev_c = &denoised_frame_prev_c;
@ -159,9 +157,11 @@ TEST_F(VideoProcessingTest, Denoiser) {
p_denoised_sse_neon = &denoised_frame_prev_sse_neon; p_denoised_sse_neon = &denoised_frame_prev_sse_neon;
p_denoised_prev_sse_neon = &denoised_frame_sse_neon; p_denoised_prev_sse_neon = &denoised_frame_sse_neon;
} }
denoiser_c.DenoiseFrame(input_buffer, p_denoised_c, p_denoised_prev_c, denoiser_c.DenoiseFrame(video_frame_.video_frame_buffer(),
p_denoised_c, p_denoised_prev_c,
false); false);
denoiser_sse_neon.DenoiseFrame(input_buffer, p_denoised_sse_neon, denoiser_sse_neon.DenoiseFrame(video_frame_.video_frame_buffer(),
p_denoised_sse_neon,
p_denoised_prev_sse_neon, false); p_denoised_prev_sse_neon, false);
// Invert the flag. // Invert the flag.
denoised_frame_toggle ^= 1; denoised_frame_toggle ^= 1;
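
Aside (not part of this CL): the denoiser loop keeps two sets of output buffers and swaps their roles each frame with the XOR toggle above. A minimal self-contained sketch of that ping-pong pattern in isolation:

    #include <cassert>

    int main() {
      int toggle = 0;
      int buffers[2] = {0, 0};
      for (int frame = 1; frame <= 4; ++frame) {
        int* current = &buffers[toggle];       // Written this frame.
        int* previous = &buffers[toggle ^ 1];  // Holds last frame's result.
        *current = frame;
        if (frame > 1)
          assert(*previous == frame - 1);
        toggle ^= 1;  // Same "invert the flag" step as in the test above.
      }
      return 0;
    }
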

View File

@ -15,7 +15,6 @@
#include <memory> #include <memory>
#include <string> #include <string>
#include "webrtc/base/keep_ref_until_done.h"
#include "webrtc/base/timeutils.h" #include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/test/testsupport/fileutils.h" #include "webrtc/test/testsupport/fileutils.h"
@ -34,21 +33,20 @@ static void PreprocessFrameAndVerify(const VideoFrame& source,
int target_height, int target_height,
VideoProcessing* vpm, VideoProcessing* vpm,
const VideoFrame* out_frame); const VideoFrame* out_frame);
rtc::scoped_refptr<VideoFrameBuffer> CropBuffer( static void CropFrame(const uint8_t* source_data,
const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
int source_width, int source_width,
int source_height, int source_height,
int offset_x, int offset_x,
int offset_y, int offset_y,
int cropped_width, int cropped_width,
int cropped_height); int cropped_height,
VideoFrame* cropped_frame);
// The |source_data| is cropped and scaled to |target_width| x |target_height|, // The |source_data| is cropped and scaled to |target_width| x |target_height|,
// and then scaled back to the expected cropped size. |expected_psnr| is used to // and then scaled back to the expected cropped size. |expected_psnr| is used to
// verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR // verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR
// verified under the same conditions. // verified under the same conditions.
static void TestSize( static void TestSize(const VideoFrame& source_frame,
const VideoFrame& source_frame, const VideoFrame& cropped_source_frame,
const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
int target_width, int target_width,
int target_height, int target_height,
double expected_psnr, double expected_psnr,
@ -70,6 +68,15 @@ void VideoProcessingTest::SetUp() {
vp_ = VideoProcessing::Create(); vp_ = VideoProcessing::Create();
ASSERT_TRUE(vp_ != NULL); ASSERT_TRUE(vp_ != NULL);
video_frame_.CreateEmptyFrame(width_, height_, width_,
half_width_, half_width_);
// Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
memset(video_frame_.video_frame_buffer()->MutableDataY(), 0,
video_frame_.allocated_size(kYPlane));
memset(video_frame_.video_frame_buffer()->MutableDataU(), 0,
video_frame_.allocated_size(kUPlane));
memset(video_frame_.video_frame_buffer()->MutableDataV(), 0,
video_frame_.allocated_size(kVPlane));
const std::string video_file = const std::string video_file =
webrtc::test::ResourcePath("foreman_cif", "yuv"); webrtc::test::ResourcePath("foreman_cif", "yuv");
source_file_ = fopen(video_file.c_str(), "rb"); source_file_ = fopen(video_file.c_str(), "rb");
@ -102,18 +109,11 @@ TEST_F(VideoProcessingTest, PreprocessorLogic) {
VideoFrame* out_frame = NULL; VideoFrame* out_frame = NULL;
// Set rescaling => output frame != NULL. // Set rescaling => output frame != NULL.
vp_->SetInputFrameResampleMode(kFastRescaling); vp_->SetInputFrameResampleMode(kFastRescaling);
PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_,
rtc::scoped_refptr<webrtc::I420Buffer> buffer = out_frame);
I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
// Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
buffer->InitializeData();
VideoFrame video_frame(buffer, 0, 0, webrtc::kVideoRotation_0);
PreprocessFrameAndVerify(video_frame, resolution, resolution, vp_, out_frame);
// No rescaling=> output frame = NULL. // No rescaling=> output frame = NULL.
vp_->SetInputFrameResampleMode(kNoRescaling); vp_->SetInputFrameResampleMode(kNoRescaling);
EXPECT_TRUE(vp_->PreprocessFrame(video_frame) != nullptr); EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr);
} }
#if defined(WEBRTC_IOS) #if defined(WEBRTC_IOS)
@ -133,15 +133,15 @@ TEST_F(VideoProcessingTest, Resampler) {
vp_->EnableTemporalDecimation(false); vp_->EnableTemporalDecimation(false);
// Reading test frame // Reading test frame
rtc::scoped_refptr<webrtc::I420Buffer> buffer = std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
I420Buffer::Create(width_, height_, width_, half_width_, half_width_); ASSERT_EQ(frame_length_,
fread(video_buffer.get(), 1, frame_length_, source_file_));
ASSERT_EQ(static_cast<size_t>(size_y_), // Using ConvertToI420 to add stride to the image.
fread(buffer->MutableDataY(), 1, size_y_, source_file_)); EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
ASSERT_EQ(static_cast<size_t>(size_uv_), 0, kVideoRotation_0, &video_frame_));
fread(buffer->MutableDataU(), 1, size_uv_, source_file_)); // Cropped source frame that will contain the expected visible region.
ASSERT_EQ(static_cast<size_t>(size_uv_), VideoFrame cropped_source_frame;
fread(buffer->MutableDataV(), 1, size_uv_, source_file_)); cropped_source_frame.CopyFrame(video_frame_);
for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) { for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
// Initiate test timer. // Initiate test timer.
@ -149,37 +149,48 @@ TEST_F(VideoProcessingTest, Resampler) {
// Init the sourceFrame with a timestamp. // Init the sourceFrame with a timestamp.
int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec; int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec;
VideoFrame video_frame(buffer, time_start_ms * 90, time_start_ms, video_frame_.set_render_time_ms(time_start_ms);
webrtc::kVideoRotation_0); video_frame_.set_timestamp(time_start_ms * 90);
// Test scaling to different sizes: source is of |width|/|height| = 352/288. // Test scaling to different sizes: source is of |width|/|height| = 352/288.
// Pure scaling: // Pure scaling:
TestSize(video_frame, buffer, width_ / 4, height_ / 4, 25.2, vp_); TestSize(video_frame_, video_frame_, width_ / 4, height_ / 4, 25.2, vp_);
TestSize(video_frame, buffer, width_ / 2, height_ / 2, 28.1, vp_); TestSize(video_frame_, video_frame_, width_ / 2, height_ / 2, 28.1, vp_);
// No resampling: // No resampling:
TestSize(video_frame, buffer, width_, height_, -1, vp_); TestSize(video_frame_, video_frame_, width_, height_, -1, vp_);
TestSize(video_frame, buffer, 2 * width_, 2 * height_, 32.2, vp_); TestSize(video_frame_, video_frame_, 2 * width_, 2 * height_, 32.2, vp_);
// Scaling and cropping. The cropped source frame is the largest center // Scaling and cropping. The cropped source frame is the largest center
// aligned region that can be used from the source while preserving aspect // aligned region that can be used from the source while preserving aspect
// ratio. // ratio.
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 56, 352, 176), CropFrame(video_buffer.get(), width_, height_, 0, 56, 352, 176,
100, 50, 24.0, vp_); &cropped_source_frame);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 30, 352, 225), TestSize(video_frame_, cropped_source_frame, 100, 50, 24.0, vp_);
400, 256, 31.3, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 68, 0, 216, 288), CropFrame(video_buffer.get(), width_, height_, 0, 30, 352, 225,
480, 640, 32.15, vp_); &cropped_source_frame);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 12, 352, 264), TestSize(video_frame_, cropped_source_frame, 400, 256, 31.3, vp_);
960, 720, 32.2, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 44, 352, 198), CropFrame(video_buffer.get(), width_, height_, 68, 0, 216, 288,
1280, 720, 32.15, vp_); &cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 480, 640, 32.15, vp_);
CropFrame(video_buffer.get(), width_, height_, 0, 12, 352, 264,
&cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 960, 720, 32.2, vp_);
CropFrame(video_buffer.get(), width_, height_, 0, 44, 352, 198,
&cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 1280, 720, 32.15, vp_);
// Upsampling to odd size. // Upsampling to odd size.
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 26, 352, 233), CropFrame(video_buffer.get(), width_, height_, 0, 26, 352, 233,
501, 333, 32.05, vp_); &cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 501, 333, 32.05, vp_);
// Downsample to odd size. // Downsample to odd size.
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 34, 352, 219), CropFrame(video_buffer.get(), width_, height_, 0, 34, 352, 219,
281, 175, 29.3, vp_); &cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vp_);
// Stop timer. // Stop timer.
const int64_t runtime = const int64_t runtime =
@ -218,32 +229,24 @@ void PreprocessFrameAndVerify(const VideoFrame& source,
EXPECT_EQ(target_height, (out_frame)->height()); EXPECT_EQ(target_height, (out_frame)->height());
} }
rtc::scoped_refptr<VideoFrameBuffer> CropBuffer( void CropFrame(const uint8_t* source_data,
const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
int source_width, int source_width,
int source_height, int source_height,
int offset_x, int offset_x,
int offset_y, int offset_y,
int cropped_width, int cropped_width,
int cropped_height) { int cropped_height,
// Force even. VideoFrame* cropped_frame) {
offset_x &= 1; cropped_frame->CreateEmptyFrame(cropped_width, cropped_height, cropped_width,
offset_y &= 1; (cropped_width + 1) / 2,
(cropped_width + 1) / 2);
size_t y_start = offset_x + offset_y * source_buffer->StrideY(); EXPECT_EQ(0,
size_t u_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU(); ConvertToI420(kI420, source_data, offset_x, offset_y, source_width,
size_t v_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU(); source_height, 0, kVideoRotation_0, cropped_frame));
return rtc::scoped_refptr<VideoFrameBuffer>(
new rtc::RefCountedObject<WrappedI420Buffer>(
cropped_width, cropped_height, source_buffer->DataY() + y_start,
source_buffer->StrideY(), source_buffer->DataU() + u_start,
source_buffer->StrideU(), source_buffer->DataV() + v_start,
source_buffer->StrideV(), rtc::KeepRefUntilDone(source_buffer)));
} }
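
Aside (not part of this CL): the removed CropBuffer() builds a zero-copy cropped view by offsetting the parent buffer's plane pointers while keeping its strides. A sketch of just that offset arithmetic, with each chroma plane using its own stride (the original reused StrideU() for the V offset); even crop offsets are assumed so luma and chroma stay aligned, and the names here are illustrative:

    #include <cstddef>

    struct I420CropOffsets {
      size_t y;
      size_t u;
      size_t v;
    };

    // Byte offsets of the crop origin inside each plane of the parent buffer.
    // Strides are unchanged; only the starting pointers move.
    I420CropOffsets CropOffsets(int offset_x, int offset_y,
                                int stride_y, int stride_u, int stride_v) {
      I420CropOffsets offsets;
      offsets.y = static_cast<size_t>(offset_y) * stride_y + offset_x;
      // Chroma is subsampled 2x2, so both coordinates are halved.
      offsets.u = static_cast<size_t>(offset_y / 2) * stride_u + offset_x / 2;
      offsets.v = static_cast<size_t>(offset_y / 2) * stride_v + offset_x / 2;
      return offsets;
    }
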
void TestSize(const VideoFrame& source_frame, void TestSize(const VideoFrame& source_frame,
const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer, const VideoFrame& cropped_source_frame,
int target_width, int target_width,
int target_height, int target_height,
double expected_psnr, double expected_psnr,
@ -260,14 +263,12 @@ void TestSize(const VideoFrame& source_frame,
// Scale |resampled_source_frame| back to the source scale. // Scale |resampled_source_frame| back to the source scale.
VideoFrame resampled_source_frame; VideoFrame resampled_source_frame;
resampled_source_frame.CopyFrame(*out_frame); resampled_source_frame.CopyFrame(*out_frame);
PreprocessFrameAndVerify(resampled_source_frame, PreprocessFrameAndVerify(resampled_source_frame, cropped_source_frame.width(),
cropped_source_buffer->width(), cropped_source_frame.height(), vpm, out_frame);
cropped_source_buffer->height(), vpm, out_frame);
WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame); WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
// Compute PSNR against the cropped source frame and check expectation. // Compute PSNR against the cropped source frame and check expectation.
double psnr = double psnr = I420PSNR(&cropped_source_frame, out_frame);
I420PSNR(*cropped_source_buffer, *out_frame->video_frame_buffer());
EXPECT_GT(psnr, expected_psnr); EXPECT_GT(psnr, expected_psnr);
printf( printf(
"PSNR: %f. PSNR is between source of size %d %d, and a modified " "PSNR: %f. PSNR is between source of size %d %d, and a modified "

View File

@ -33,6 +33,7 @@ class VideoProcessingTest : public ::testing::Test {
static void TearDownTestCase() { Trace::ReturnTrace(); } static void TearDownTestCase() { Trace::ReturnTrace(); }
VideoProcessing* vp_; VideoProcessing* vp_;
FILE* source_file_; FILE* source_file_;
VideoFrame video_frame_;
const int width_; const int width_;
const int half_width_; const int half_width_;
const int height_; const int height_;

View File

@ -116,18 +116,22 @@ class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test {
CountingFakeEncoder fake_encoder_; CountingFakeEncoder fake_encoder_;
VideoEncoderSoftwareFallbackWrapper fallback_wrapper_; VideoEncoderSoftwareFallbackWrapper fallback_wrapper_;
VideoCodec codec_ = {}; VideoCodec codec_ = {};
std::unique_ptr<VideoFrame> frame_; VideoFrame frame_;
}; };
void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame() { void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame() {
rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create( frame_.CreateEmptyFrame(kWidth, kHeight, kWidth, (kWidth + 1) / 2,
kWidth, kHeight, kWidth, (kWidth + 1) / 2, (kWidth + 1) / 2); (kWidth + 1) / 2);
buffer->SetToBlack(); memset(frame_.video_frame_buffer()->MutableDataY(), 16,
std::vector<FrameType> types(1, kVideoFrameKey); frame_.allocated_size(webrtc::kYPlane));
memset(frame_.video_frame_buffer()->MutableDataU(), 128,
frame_.allocated_size(webrtc::kUPlane));
memset(frame_.video_frame_buffer()->MutableDataV(), 128,
frame_.allocated_size(webrtc::kVPlane));
frame_.reset(new VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0)); std::vector<FrameType> types(1, kVideoFrameKey);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
fallback_wrapper_.Encode(*frame_, nullptr, &types)); fallback_wrapper_.Encode(frame_, nullptr, &types));
} }
void VideoEncoderSoftwareFallbackWrapperTest::UtilizeFallbackEncoder() { void VideoEncoderSoftwareFallbackWrapperTest::UtilizeFallbackEncoder() {
@ -221,9 +225,9 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
// Encoding a frame using the fallback should arrive at the new callback. // Encoding a frame using the fallback should arrive at the new callback.
std::vector<FrameType> types(1, kVideoFrameKey); std::vector<FrameType> types(1, kVideoFrameKey);
frame_->set_timestamp(frame_->timestamp() + 1000); frame_.set_timestamp(frame_.timestamp() + 1000);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
fallback_wrapper_.Encode(*frame_, nullptr, &types)); fallback_wrapper_.Encode(frame_, nullptr, &types));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release()); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
} }
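
Aside (not part of this CL): EncodeFrame() clears the Y plane to 16 and both chroma planes to 128, which is the limited-range YUV representation of black that SetToBlack() on the removed side expresses. A small self-contained check of those constants:

    #include <cassert>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    int main() {
      const uint8_t kBlackY = 16;      // Limited-range (studio swing) black luma.
      const uint8_t kNeutralUV = 128;  // Neutral chroma, i.e. no color cast.
      std::vector<uint8_t> y(16 * 16), u(8 * 8), v(8 * 8);
      memset(y.data(), kBlackY, y.size());
      memset(u.data(), kNeutralUV, u.size());
      memset(v.data(), kNeutralUV, v.size());
      assert(y.front() == 16 && u.front() == 128 && v.front() == 128);
      return 0;
    }
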