Revert of New static method I420Buffer::SetToBlack. (patchset #4 id:60001 of https://codereview.webrtc.org/2029273004/ )

Reason for revert:
Breaks Chrome; in particular, the tests in

media_stream_remote_video_source_unittest.cc

use the InitToBlack method which is being deleted.

Original issue's description:
> New static method I420Buffer::SetToBlack.
>
> Replaces cricket::VideoFrame::SetToBlack and
> cricket::WebRtcVideoFrame::InitToBlack, which are deleted.
>
> Refactors the black frame logic in VideoBroadcaster, and a few of the
> tests.
>
> BUG=webrtc:5682
>
> Committed: https://crrev.com/663f9e2ddc86e813f6db04ba2cf5ac1ed9e7ef67
> Cr-Commit-Position: refs/heads/master@{#13063}

TBR=perkj@webrtc.org,pbos@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5682

Review-Url: https://codereview.webrtc.org/2049023002
Cr-Commit-Position: refs/heads/master@{#13065}
This commit is contained in:
nisse
2016-06-08 05:20:53 -07:00
committed by Commit bot
parent 0ab07d67cb
commit 271d740788
12 changed files with 143 additions and 159 deletions

View File

@ -87,18 +87,8 @@ class I420Buffer : public VideoFrameBuffer {
public:
I420Buffer(int width, int height);
I420Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
// Sets all three planes to all zeros. Used to work around for
// quirks in memory checkers
// (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
// ffmpeg (http://crbug.com/390941).
// TODO(nisse): Should be deleted if/when those issues are resolved
// in a better way.
void InitializeData();
// Sets the frame buffer to all black.
void SetToBlack();
int width() const override;
int height() const override;
const uint8_t* DataY() const override;

View File

@ -200,14 +200,6 @@ rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(
return copy;
}
// Paints the whole buffer black: the Y plane is filled with 0 and the
// U/V planes with the chroma midpoint 128. RTC_CHECK-fails if libyuv
// reports an error.
void I420Buffer::SetToBlack() {
  const int result = libyuv::I420Rect(
      MutableDataY(), StrideY(), MutableDataU(), StrideU(), MutableDataV(),
      StrideV(), /*x=*/0, /*y=*/0, width(), height(),
      /*value_y=*/0, /*value_u=*/128, /*value_v=*/128);
  RTC_CHECK(result == 0);
}
NativeHandleBuffer::NativeHandleBuffer(void* native_handle,
int width,
int height)

View File

@ -33,11 +33,8 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
virtual void OnFrame(const VideoFrame& frame) {
rtc::CritScope cs(&crit_);
// TODO(zhurunz) Check with VP8 team to see if we can remove this
// tolerance on Y values. Some unit tests produce Y values close
// to 16 rather than close to zero, for supposedly black frames.
// Largest value observed is 34, e.g., running
// P2PTestConductor.LocalP2PTest16To9 (peerconnection_unittests).
black_frame_ = CheckFrameColorYuv(0, 48, 128, 128, 128, 128, &frame);
// tolerance on Y values.
black_frame_ = CheckFrameColorYuv(6, 48, 128, 128, 128, 128, &frame);
// Treat unexpected frame size as error.
++num_rendered_frames_;
width_ = frame.width();

View File

@ -54,9 +54,7 @@ void VideoBroadcaster::OnFrame(const cricket::VideoFrame& frame) {
rtc::CritScope cs(&sinks_and_wants_lock_);
for (auto& sink_pair : sink_pairs()) {
if (sink_pair.wants.black_frames) {
sink_pair.sink->OnFrame(cricket::WebRtcVideoFrame(
GetBlackFrameBuffer(frame.width(), frame.height()),
frame.rotation(), frame.timestamp_us()));
sink_pair.sink->OnFrame(GetBlackFrame(frame));
} else {
sink_pair.sink->OnFrame(frame);
}
@ -95,17 +93,20 @@ void VideoBroadcaster::UpdateWants() {
current_wants_ = wants;
}
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>&
VideoBroadcaster::GetBlackFrameBuffer(int width, int height) {
if (!black_frame_buffer_ || black_frame_buffer_->width() != width ||
black_frame_buffer_->height() != height) {
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
new RefCountedObject<webrtc::I420Buffer>(width, height);
buffer->SetToBlack();
black_frame_buffer_ = buffer;
const cricket::VideoFrame& VideoBroadcaster::GetBlackFrame(
const cricket::VideoFrame& frame) {
if (black_frame_ && black_frame_->width() == frame.width() &&
black_frame_->height() == frame.height() &&
black_frame_->rotation() == frame.rotation()) {
black_frame_->set_timestamp_us(frame.timestamp_us());
return *black_frame_;
}
return black_frame_buffer_;
black_frame_.reset(new cricket::WebRtcVideoFrame(
new rtc::RefCountedObject<webrtc::I420Buffer>(frame.width(),
frame.height()),
frame.rotation(), frame.timestamp_us()));
black_frame_->SetToBlack();
return *black_frame_;
}
} // namespace rtc

View File

@ -49,15 +49,14 @@ class VideoBroadcaster : public VideoSourceBase,
protected:
void UpdateWants() EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& GetBlackFrameBuffer(
int width, int height)
const cricket::VideoFrame& GetBlackFrame(const cricket::VideoFrame& frame)
EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
ThreadChecker thread_checker_;
rtc::CriticalSection sinks_and_wants_lock_;
VideoSinkWants current_wants_ GUARDED_BY(sinks_and_wants_lock_);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> black_frame_buffer_;
std::unique_ptr<cricket::WebRtcVideoFrame> black_frame_;
};
} // namespace rtc

View File

@ -131,16 +131,13 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
FakeVideoRenderer sink2;
VideoSinkWants wants2;
wants2.black_frames = false;
wants1.black_frames = false;
broadcaster.AddOrUpdateSink(&sink2, wants2);
rtc::scoped_refptr<webrtc::I420Buffer> buffer(
new rtc::RefCountedObject<webrtc::I420Buffer>(100, 200));
// Makes it not all black.
buffer->InitializeData();
cricket::WebRtcVideoFrame frame1(
buffer, webrtc::kVideoRotation_0, 10 /* timestamp_us */);
cricket::WebRtcVideoFrame frame1;
frame1.InitToBlack(100, 200, 10000 /*ts*/);
// Make it not all-black
frame1.video_frame_buffer()->MutableDataU()[0] = 0;
broadcaster.OnFrame(frame1);
EXPECT_TRUE(sink1.black_frame());
EXPECT_EQ(10000, sink1.timestamp());
@ -153,8 +150,10 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
wants2.black_frames = true;
broadcaster.AddOrUpdateSink(&sink2, wants2);
cricket::WebRtcVideoFrame frame2(
buffer, webrtc::kVideoRotation_0, 30 /* timestamp_us */);
cricket::WebRtcVideoFrame frame2;
frame2.InitToBlack(100, 200, 30000 /*ts*/);
// Make it not all-black
frame2.video_frame_buffer()->MutableDataU()[0] = 0;
broadcaster.OnFrame(frame2);
EXPECT_FALSE(sink1.black_frame());
EXPECT_EQ(30000, sink1.timestamp());

View File

@ -148,6 +148,18 @@ void VideoFrame::StretchToFrame(VideoFrame* dst,
dst->set_rotation(rotation());
}
// Fills this frame's buffer with black: luma (Y) is set to 16 and
// chroma (U/V) to 128 — i.e. black in limited/video range, unlike
// I420Buffer::SetToBlack which writes Y=0.
// Returns true iff libyuv::I420Rect succeeds.
bool VideoFrame::SetToBlack() {
return libyuv::I420Rect(video_frame_buffer()->MutableDataY(),
video_frame_buffer()->StrideY(),
video_frame_buffer()->MutableDataU(),
video_frame_buffer()->StrideU(),
video_frame_buffer()->MutableDataV(),
video_frame_buffer()->StrideV(),
0, 0,
width(), height(),
16, 128, 128) == 0;
}
static const size_t kMaxSampleSize = 1000000000u;
// Returns whether a sample is valid.
bool VideoFrame::Validate(uint32_t fourcc,

View File

@ -91,6 +91,9 @@ class VideoFrame {
virtual void StretchToFrame(VideoFrame *target, bool interpolate,
bool crop) const;
// Sets the video frame to black.
virtual bool SetToBlack();
// Tests if sample is valid. Returns true if valid.
static bool Validate(uint32_t fourcc,
int w,

View File

@ -254,28 +254,25 @@ class VideoFrameTest : public testing::Test {
// Simple conversion routines to verify the optimized VideoFrame routines.
// Converts from the specified colorspace to I420.
std::unique_ptr<T> ConvertYuv422(const rtc::MemoryStream* ms,
uint32_t fourcc,
uint32_t width,
uint32_t height) {
bool ConvertYuv422(const rtc::MemoryStream* ms,
uint32_t fourcc,
uint32_t width,
uint32_t height,
T* frame) {
int y1_pos, y2_pos, u_pos, v_pos;
if (!GetYuv422Packing(fourcc, &y1_pos, &y2_pos, &u_pos, &v_pos)) {
return nullptr;
return false;
}
rtc::scoped_refptr<webrtc::I420Buffer> buffer(
new rtc::RefCountedObject<webrtc::I420Buffer>(width, height));
buffer->SetToBlack();
const uint8_t* start = reinterpret_cast<const uint8_t*>(ms->GetBuffer());
int awidth = (width + 1) & ~1;
int stride_y = buffer->StrideY();
int stride_u = buffer->StrideU();
int stride_v = buffer->StrideV();
uint8_t* plane_y = buffer->MutableDataY();
uint8_t* plane_u = buffer->MutableDataU();
uint8_t* plane_v = buffer->MutableDataV();
frame->InitToBlack(width, height, 0);
int stride_y = frame->video_frame_buffer()->StrideY();
int stride_u = frame->video_frame_buffer()->StrideU();
int stride_v = frame->video_frame_buffer()->StrideV();
uint8_t* plane_y = frame->video_frame_buffer()->MutableDataY();
uint8_t* plane_u = frame->video_frame_buffer()->MutableDataU();
uint8_t* plane_v = frame->video_frame_buffer()->MutableDataV();
for (uint32_t y = 0; y < height; ++y) {
for (uint32_t x = 0; x < width; x += 2) {
const uint8_t* quad1 = start + (y * awidth + x) * 2;
@ -295,18 +292,19 @@ class VideoFrameTest : public testing::Test {
}
}
}
return std::unique_ptr<T>(new T(buffer, 0, webrtc::kVideoRotation_0));
return true;
}
// Convert RGB to 420.
// A negative height inverts the image.
std::unique_ptr<T> ConvertRgb(const rtc::MemoryStream* ms,
uint32_t fourcc,
int32_t width,
int32_t height) {
bool ConvertRgb(const rtc::MemoryStream* ms,
uint32_t fourcc,
int32_t width,
int32_t height,
T* frame) {
int r_pos, g_pos, b_pos, bytes;
if (!GetRgbPacking(fourcc, &r_pos, &g_pos, &b_pos, &bytes)) {
return nullptr;
return false;
}
int pitch = width * bytes;
const uint8_t* start = reinterpret_cast<const uint8_t*>(ms->GetBuffer());
@ -315,17 +313,13 @@ class VideoFrameTest : public testing::Test {
start = start + pitch * (height - 1);
pitch = -pitch;
}
rtc::scoped_refptr<webrtc::I420Buffer> buffer(
new rtc::RefCountedObject<webrtc::I420Buffer>(width, height));
buffer->SetToBlack();
int stride_y = buffer->StrideY();
int stride_u = buffer->StrideU();
int stride_v = buffer->StrideV();
uint8_t* plane_y = buffer->MutableDataY();
uint8_t* plane_u = buffer->MutableDataU();
uint8_t* plane_v = buffer->MutableDataV();
frame->InitToBlack(width, height, 0);
int stride_y = frame->video_frame_buffer()->StrideY();
int stride_u = frame->video_frame_buffer()->StrideU();
int stride_v = frame->video_frame_buffer()->StrideV();
uint8_t* plane_y = frame->video_frame_buffer()->MutableDataY();
uint8_t* plane_u = frame->video_frame_buffer()->MutableDataU();
uint8_t* plane_v = frame->video_frame_buffer()->MutableDataV();
for (int32_t y = 0; y < height; y += 2) {
for (int32_t x = 0; x < width; x += 2) {
const uint8_t* rgb[4];
@ -354,7 +348,7 @@ class VideoFrameTest : public testing::Test {
(yuv[0][2] + yuv[1][2] + yuv[2][2] + yuv[3][2] + 2) / 4;
}
}
return std::unique_ptr<T>(new T(buffer, 0, webrtc::kVideoRotation_0));
return true;
}
// Simple and slow RGB->YUV conversion. From NTSC standard, c/o Wikipedia.
@ -512,7 +506,7 @@ class VideoFrameTest : public testing::Test {
static bool IsBlack(const cricket::VideoFrame& frame) {
return !IsNull(frame) &&
*frame.video_frame_buffer()->DataY() <= 16 &&
*frame.video_frame_buffer()->DataY() == 16 &&
*frame.video_frame_buffer()->DataU() == 128 &&
*frame.video_frame_buffer()->DataV() == 128;
}
@ -619,29 +613,29 @@ class VideoFrameTest : public testing::Test {
// Test constructing an image from a wide YUY2 buffer.
// Normal is 1280x720. Wide is 12800x72
void ConstructYuy2Wide() {
T frame1, frame2;
std::unique_ptr<rtc::MemoryStream> ms(
CreateYuv422Sample(cricket::FOURCC_YUY2, kWidth * 10, kHeight / 10));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertYuv422(ms.get(), cricket::FOURCC_YUY2,
kWidth * 10, kHeight / 10);
ASSERT_TRUE(frame1);
T frame2;
EXPECT_TRUE(ConvertYuv422(ms.get(), cricket::FOURCC_YUY2,
kWidth * 10, kHeight / 10,
&frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_YUY2,
kWidth * 10, kHeight / 10, &frame2));
EXPECT_TRUE(IsEqual(*frame1, frame2, 0));
EXPECT_TRUE(IsEqual(frame1, frame2, 0));
}
// Test constructing an image from a UYVY buffer.
void ConstructUyvy() {
T frame1, frame2;
std::unique_ptr<rtc::MemoryStream> ms(
CreateYuv422Sample(cricket::FOURCC_UYVY, kWidth, kHeight));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertYuv422(ms.get(), cricket::FOURCC_UYVY,
kWidth, kHeight);
T frame2;
EXPECT_TRUE(ConvertYuv422(ms.get(), cricket::FOURCC_UYVY, kWidth, kHeight,
&frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_UYVY,
kWidth, kHeight, &frame2));
EXPECT_TRUE(IsEqual(*frame1, frame2, 0));
EXPECT_TRUE(IsEqual(frame1, frame2, 0));
}
// Test constructing an image from a random buffer.
@ -676,91 +670,85 @@ class VideoFrameTest : public testing::Test {
// Test constructing an image from a ABGR buffer
// Due to rounding, some pixels may differ slightly from the VideoFrame impl.
void ConstructABGR() {
T frame1, frame2;
std::unique_ptr<rtc::MemoryStream> ms(
CreateRgbSample(cricket::FOURCC_ABGR, kWidth, kHeight));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertRgb(ms.get(), cricket::FOURCC_ABGR,
kWidth, kHeight);
ASSERT_TRUE(frame1);
T frame2;
EXPECT_TRUE(ConvertRgb(ms.get(), cricket::FOURCC_ABGR, kWidth, kHeight,
&frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_ABGR,
kWidth, kHeight, &frame2));
EXPECT_TRUE(IsEqual(*frame1, frame2, 2));
EXPECT_TRUE(IsEqual(frame1, frame2, 2));
}
// Test constructing an image from a ARGB buffer
// Due to rounding, some pixels may differ slightly from the VideoFrame impl.
void ConstructARGB() {
T frame1, frame2;
std::unique_ptr<rtc::MemoryStream> ms(
CreateRgbSample(cricket::FOURCC_ARGB, kWidth, kHeight));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertRgb(ms.get(), cricket::FOURCC_ARGB,
kWidth, kHeight);
ASSERT_TRUE(frame1);
T frame2;
EXPECT_TRUE(ConvertRgb(ms.get(), cricket::FOURCC_ARGB, kWidth, kHeight,
&frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_ARGB,
kWidth, kHeight, &frame2));
EXPECT_TRUE(IsEqual(*frame1, frame2, 2));
EXPECT_TRUE(IsEqual(frame1, frame2, 2));
}
// Test constructing an image from a wide ARGB buffer
// Normal is 1280x720. Wide is 12800x72
void ConstructARGBWide() {
T frame1, frame2;
std::unique_ptr<rtc::MemoryStream> ms(
CreateRgbSample(cricket::FOURCC_ARGB, kWidth * 10, kHeight / 10));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertRgb(ms.get(), cricket::FOURCC_ARGB,
kWidth * 10, kHeight / 10);
ASSERT_TRUE(frame1);
T frame2;
EXPECT_TRUE(ConvertRgb(ms.get(), cricket::FOURCC_ARGB,
kWidth * 10, kHeight / 10, &frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_ARGB,
kWidth * 10, kHeight / 10, &frame2));
EXPECT_TRUE(IsEqual(*frame1, frame2, 2));
EXPECT_TRUE(IsEqual(frame1, frame2, 2));
}
// Test constructing an image from an BGRA buffer.
// Due to rounding, some pixels may differ slightly from the VideoFrame impl.
void ConstructBGRA() {
T frame1, frame2;
std::unique_ptr<rtc::MemoryStream> ms(
CreateRgbSample(cricket::FOURCC_BGRA, kWidth, kHeight));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertRgb(ms.get(), cricket::FOURCC_BGRA,
kWidth, kHeight);
ASSERT_TRUE(frame1);
T frame2;
EXPECT_TRUE(ConvertRgb(ms.get(), cricket::FOURCC_BGRA, kWidth, kHeight,
&frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_BGRA,
kWidth, kHeight, &frame2));
EXPECT_TRUE(IsEqual(*frame1, frame2, 2));
EXPECT_TRUE(IsEqual(frame1, frame2, 2));
}
// Test constructing an image from a 24BG buffer.
// Due to rounding, some pixels may differ slightly from the VideoFrame impl.
void Construct24BG() {
T frame1, frame2;
std::unique_ptr<rtc::MemoryStream> ms(
CreateRgbSample(cricket::FOURCC_24BG, kWidth, kHeight));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertRgb(ms.get(), cricket::FOURCC_24BG,
kWidth, kHeight);
ASSERT_TRUE(frame1);
T frame2;
EXPECT_TRUE(ConvertRgb(ms.get(), cricket::FOURCC_24BG, kWidth, kHeight,
&frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_24BG,
kWidth, kHeight, &frame2));
EXPECT_TRUE(IsEqual(*frame1, frame2, 2));
EXPECT_TRUE(IsEqual(frame1, frame2, 2));
}
// Test constructing an image from a raw RGB buffer.
// Due to rounding, some pixels may differ slightly from the VideoFrame impl.
void ConstructRaw() {
T frame1, frame2;
std::unique_ptr<rtc::MemoryStream> ms(
CreateRgbSample(cricket::FOURCC_RAW, kWidth, kHeight));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertRgb(ms.get(), cricket::FOURCC_RAW,
kWidth, kHeight);
ASSERT_TRUE(frame1);
T frame2;
EXPECT_TRUE(ConvertRgb(ms.get(), cricket::FOURCC_RAW, kWidth, kHeight,
&frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_RAW,
kWidth, kHeight, &frame2));
EXPECT_TRUE(IsEqual(*frame1, frame2, 2));
EXPECT_TRUE(IsEqual(frame1, frame2, 2));
}
// Test constructing an image from a RGB565 buffer
@ -830,7 +818,7 @@ class VideoFrameTest : public testing::Test {
data_size, 0, webrtc::kVideoRotation_0)); \
int width_rotate = frame1.width(); \
int height_rotate = frame1.height(); \
frame3.InitToEmptyBuffer(width_rotate, height_rotate, 0); \
EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
libyuv::I420Mirror(frame2.video_frame_buffer()->DataY(), \
frame2.video_frame_buffer()->StrideY(), \
frame2.video_frame_buffer()->DataU(), \
@ -868,7 +856,7 @@ class VideoFrameTest : public testing::Test {
data_size, 0, webrtc::kVideoRotation_0)); \
int width_rotate = frame1.width(); \
int height_rotate = frame1.height(); \
frame3.InitToEmptyBuffer(width_rotate, height_rotate, 0); \
EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
libyuv::I420Rotate(frame2.video_frame_buffer()->DataY(), \
frame2.video_frame_buffer()->StrideY(), \
frame2.video_frame_buffer()->DataU(), \
@ -1092,17 +1080,16 @@ class VideoFrameTest : public testing::Test {
// Test constructing an image from an ARGB buffer with horizontal cropping.
void ConstructARGBCropHorizontal() {
T frame1, frame2;
std::unique_ptr<rtc::MemoryStream> ms(
CreateRgbSample(cricket::FOURCC_ARGB, kWidth, kHeight));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertRgb(ms.get(), cricket::FOURCC_ARGB,
kWidth, kHeight);
ASSERT_TRUE(frame1);
T frame2;
EXPECT_TRUE(ConvertRgb(ms.get(), cricket::FOURCC_ARGB, kWidth, kHeight,
&frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_ARGB, kWidth, kHeight,
kWidth * 3 / 4, kHeight, webrtc::kVideoRotation_0,
&frame2));
EXPECT_TRUE(IsEqualWithCrop(frame2, *frame1, kWidth / 8, 0, 2));
EXPECT_TRUE(IsEqualWithCrop(frame2, frame1, kWidth / 8, 0, 2));
}
// Test constructing an image from an I420 buffer, cropping top and bottom.
@ -1359,6 +1346,16 @@ class VideoFrameTest : public testing::Test {
EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer());
}
// Test creating an empty image and initing it to black.
// Calls InitToBlack repeat_ times on the same frame (exercising
// re-initialization), then verifies the final size and that the pixel
// data passes the IsBlack check (Y == 16, U == V == 128).
void ConstructBlack() {
T frame;
for (int i = 0; i < repeat_; ++i) {
EXPECT_TRUE(frame.InitToBlack(kWidth, kHeight, 0));
}
EXPECT_TRUE(IsSize(frame, kWidth, kHeight));
EXPECT_TRUE(IsBlack(frame));
}
// Test constructing an image from a YUY2 buffer with a range of sizes.
// Only tests that conversion does not crash or corrupt heap.
void ConstructYuy2AllSizes() {
@ -1380,18 +1377,17 @@ class VideoFrameTest : public testing::Test {
// Test constructing an image from a ARGB buffer with a range of sizes.
// Only tests that conversion does not crash or corrupt heap.
void ConstructARGBAllSizes() {
T frame1, frame2;
for (int height = kMinHeightAll; height <= kMaxHeightAll; ++height) {
for (int width = kMinWidthAll; width <= kMaxWidthAll; ++width) {
std::unique_ptr<rtc::MemoryStream> ms(
CreateRgbSample(cricket::FOURCC_ARGB, width, height));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertRgb(ms.get(), cricket::FOURCC_ARGB,
width, height);
ASSERT_TRUE(frame1);
T frame2;
EXPECT_TRUE(ConvertRgb(ms.get(), cricket::FOURCC_ARGB, width, height,
&frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_ARGB,
width, height, &frame2));
EXPECT_TRUE(IsEqual(*frame1, frame2, 64));
EXPECT_TRUE(IsEqual(frame1, frame2, 64));
}
}
// Test a practical window size for screencasting usecase.
@ -1402,13 +1398,12 @@ class VideoFrameTest : public testing::Test {
std::unique_ptr<rtc::MemoryStream> ms(
CreateRgbSample(cricket::FOURCC_ARGB, kOddWidth + i, kOddHeight + j));
ASSERT_TRUE(ms.get() != NULL);
std::unique_ptr<T> frame1 = ConvertRgb(ms.get(), cricket::FOURCC_ARGB,
kOddWidth + i, kOddHeight + j);
ASSERT_TRUE(frame1);
T frame2;
EXPECT_TRUE(ConvertRgb(ms.get(), cricket::FOURCC_ARGB,
kOddWidth + i, kOddHeight + j,
&frame1));
EXPECT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_ARGB,
kOddWidth + i, kOddHeight + j, &frame2));
EXPECT_TRUE(IsEqual(*frame1, frame2, 64));
EXPECT_TRUE(IsEqual(frame1, frame2, 64));
}
}
}
@ -1458,7 +1453,7 @@ class VideoFrameTest : public testing::Test {
out,
out_size, stride));
}
frame2.InitToEmptyBuffer(kWidth, kHeight, 0);
EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 0));
for (int i = 0; i < repeat_from; ++i) {
EXPECT_EQ(0, RGBToI420(out, stride,
frame2.video_frame_buffer()->MutableDataY(),
@ -1813,12 +1808,8 @@ class VideoFrameTest : public testing::Test {
void StretchToFrame() {
// Create the source frame as a black frame.
rtc::scoped_refptr<webrtc::I420Buffer> buffer(
new rtc::RefCountedObject<webrtc::I420Buffer>(kWidth * 2, kHeight * 2));
buffer->SetToBlack();
T source(buffer, 0, webrtc::kVideoRotation_0);
T source;
EXPECT_TRUE(source.InitToBlack(kWidth * 2, kHeight * 2, 0));
EXPECT_TRUE(IsSize(source, kWidth * 2, kHeight * 2));
// Create the target frame by loading from a file.

View File

@ -65,6 +65,12 @@ bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh,
frame->rotation, apply_rotation);
}
// Allocates a w x h buffer with the given timestamp (via
// InitToEmptyBuffer) and then paints it black with SetToBlack().
// Returns true iff SetToBlack() succeeds.
bool WebRtcVideoFrame::InitToBlack(int w, int h,
int64_t time_stamp_ns) {
InitToEmptyBuffer(w, h, time_stamp_ns);
return SetToBlack();
}
int WebRtcVideoFrame::width() const {
return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
}

View File

@ -63,6 +63,8 @@ class WebRtcVideoFrame : public VideoFrame {
void InitToEmptyBuffer(int w, int h);
void InitToEmptyBuffer(int w, int h, int64_t time_stamp_ns);
bool InitToBlack(int w, int h, int64_t time_stamp_ns);
int width() const override;
int height() const override;

View File

@ -20,24 +20,15 @@ namespace {
class WebRtcVideoTestFrame : public cricket::WebRtcVideoFrame {
public:
WebRtcVideoTestFrame() {}
WebRtcVideoTestFrame(
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation)
: WebRtcVideoFrame(buffer, time_stamp_ns, rotation) {}
// The ApplyRotationToFrame test needs this as a public method.
using cricket::WebRtcVideoFrame::set_rotation;
virtual VideoFrame* CreateEmptyFrame(int w,
int h,
int64_t time_stamp) const override {
rtc::scoped_refptr<webrtc::I420Buffer> buffer(
new rtc::RefCountedObject<webrtc::I420Buffer>(w, h));
buffer->SetToBlack();
return new WebRtcVideoTestFrame(
buffer, time_stamp, webrtc::kVideoRotation_0);
WebRtcVideoTestFrame* frame = new WebRtcVideoTestFrame();
frame->InitToBlack(w, h, time_stamp);
return frame;
}
};
@ -154,6 +145,7 @@ TEST_WEBRTCVIDEOFRAME(ConstructI420CropVertical)
// TODO(juberti): WebRtcVideoFrame is not currently refcounted.
// TEST_WEBRTCVIDEOFRAME(ConstructCopy)
// TEST_WEBRTCVIDEOFRAME(ConstructCopyIsRef)
TEST_WEBRTCVIDEOFRAME(ConstructBlack)
// TODO(fbarchard): Implement Jpeg
// TEST_WEBRTCVIDEOFRAME(ConstructMjpgI420)
TEST_WEBRTCVIDEOFRAME(ConstructMjpgI422)