Reland of Delete webrtc::VideoFrame methods buffer and stride. (patchset #1 id:1 of https://codereview.webrtc.org/1935443002/ )

Reason for revert:
I plan to reland this change in a week or two, after downstream users are updated.

Original issue's description:
> Revert of Delete webrtc::VideoFrame methods buffer and stride. (patchset #14 id:250001 of https://codereview.webrtc.org/1900673002/ )
>
> Reason for revert:
> Breaks chrome FYI bots.
>
> Original issue's description:
> > Delete webrtc::VideoFrame methods buffer and stride.
> >
> > To make the HasOneRef/IsMutable hack work, also had to change the
> > video_frame_buffer method to return a const ref to a scoped_ref_ptr,
> > to not imply an AddRef.
> >
> > BUG=webrtc:5682
>
> TBR=perkj@webrtc.org,magjed@webrtc.org,pbos@webrtc.org,pthatcher@webrtc.org,stefan@webrtc.org
> # Skipping CQ checks because original CL landed less than 1 day ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=webrtc:5682
>
> Committed: https://crrev.com/5b3c443d301f2c2f18dac5b02652c08b91ea3828
> Cr-Commit-Position: refs/heads/master@{#12558}

TBR=perkj@webrtc.org,magjed@webrtc.org,pbos@webrtc.org,pthatcher@webrtc.org,stefan@webrtc.org
# Not skipping CQ checks because original CL landed more than 1 day ago.
BUG=webrtc:5682

Review-Url: https://codereview.webrtc.org/1963413004
Cr-Commit-Position: refs/heads/master@{#12721}
This commit is contained in:
nisse
2016-05-13 04:12:41 -07:00
committed by Commit bot
parent a3002db8d6
commit d0dc66e0ea
24 changed files with 411 additions and 398 deletions

View File

@@ -301,14 +301,21 @@ int SimulcastEncoderAdapter::Encode(
// Aligning stride values based on width.
dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
(dst_width + 1) / 2, (dst_width + 1) / 2);
libyuv::I420Scale(
input_image.buffer(kYPlane), input_image.stride(kYPlane),
input_image.buffer(kUPlane), input_image.stride(kUPlane),
input_image.buffer(kVPlane), input_image.stride(kVPlane), src_width,
src_height, dst_frame.buffer(kYPlane), dst_frame.stride(kYPlane),
dst_frame.buffer(kUPlane), dst_frame.stride(kUPlane),
dst_frame.buffer(kVPlane), dst_frame.stride(kVPlane), dst_width,
dst_height, libyuv::kFilterBilinear);
libyuv::I420Scale(input_image.video_frame_buffer()->DataY(),
input_image.video_frame_buffer()->StrideY(),
input_image.video_frame_buffer()->DataU(),
input_image.video_frame_buffer()->StrideU(),
input_image.video_frame_buffer()->DataV(),
input_image.video_frame_buffer()->StrideV(),
src_width, src_height,
dst_frame.video_frame_buffer()->MutableDataY(),
dst_frame.video_frame_buffer()->StrideY(),
dst_frame.video_frame_buffer()->MutableDataU(),
dst_frame.video_frame_buffer()->StrideU(),
dst_frame.video_frame_buffer()->MutableDataV(),
dst_frame.video_frame_buffer()->StrideV(),
dst_width, dst_height,
libyuv::kFilterBilinear);
dst_frame.set_timestamp(input_image.timestamp());
dst_frame.set_render_time_ms(input_image.render_time_ms());
streaminfos_[stream_idx].encoder->Encode(dst_frame, codec_specific_info,

View File

@@ -119,13 +119,13 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
int32_t Decoded(VideoFrame& decoded_image) override {
for (int i = 0; i < decoded_image.width(); ++i) {
EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1);
EXPECT_NEAR(kColorY, decoded_image.video_frame_buffer()->DataY()[i], 1);
}
// TODO(mikhal): Verify the difference between U,V and the original.
for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) {
EXPECT_NEAR(kColorU, decoded_image.buffer(kUPlane)[i], 4);
EXPECT_NEAR(kColorV, decoded_image.buffer(kVPlane)[i], 4);
EXPECT_NEAR(kColorU, decoded_image.video_frame_buffer()->DataU()[i], 4);
EXPECT_NEAR(kColorV, decoded_image.video_frame_buffer()->DataV()[i], 4);
}
decoded_frames_++;
return 0;
@@ -222,26 +222,40 @@ class TestVp8Simulcast : public ::testing::Test {
TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder)
: encoder_(encoder), decoder_(decoder) {}
// Creates a VideoFrame from |plane_colors|.
static void CreateImage(VideoFrame* frame, int plane_colors[kNumOfPlanes]) {
for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
int width =
(plane_num != kYPlane ? (frame->width() + 1) / 2 : frame->width());
int height =
(plane_num != kYPlane ? (frame->height() + 1) / 2 : frame->height());
PlaneType plane_type = static_cast<PlaneType>(plane_num);
uint8_t* data = frame->buffer(plane_type);
static void SetPlane(uint8_t* data,
uint8_t value,
int width,
int height,
int stride) {
for (int i = 0; i < height; i++, data += stride) {
// Setting allocated area to zero - setting only image size to
// requested values - will make it easier to distinguish between image
// size and frame size (accounting for stride).
memset(frame->buffer(plane_type), 0, frame->allocated_size(plane_type));
for (int i = 0; i < height; i++) {
memset(data, plane_colors[plane_num], width);
data += frame->stride(plane_type);
}
memset(data, value, width);
memset(data + width, 0, stride - width);
}
}
// Fills in a VideoFrameBuffer from |plane_colors|.
static void CreateImage(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
int plane_colors[kNumOfPlanes]) {
int width = buffer->width();
int height = buffer->height();
int chroma_width = (width + 1) / 2;
int chroma_height = (height + 1) / 2;
SetPlane(buffer->MutableDataY(), plane_colors[0],
width, height, buffer->StrideY());
SetPlane(buffer->MutableDataU(), plane_colors[1],
chroma_width, chroma_height,
buffer->StrideU());
SetPlane(buffer->MutableDataV(), plane_colors[2],
chroma_width, chroma_height,
buffer->StrideV());
}
static void DefaultSettings(VideoCodec* settings,
const int* temporal_layer_profile) {
assert(settings);
@@ -305,11 +319,11 @@ class TestVp8Simulcast : public ::testing::Test {
int half_width = (kDefaultWidth + 1) / 2;
input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
half_width, half_width);
memset(input_frame_.buffer(kYPlane), 0,
memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
memset(input_frame_.buffer(kUPlane), 0,
memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
memset(input_frame_.buffer(kVPlane), 0,
memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
}
@@ -555,11 +569,11 @@ class TestVp8Simulcast : public ::testing::Test {
int half_width = (settings_.width + 1) / 2;
input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width);
memset(input_frame_.buffer(kYPlane), 0,
memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
memset(input_frame_.buffer(kUPlane), 0,
memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
memset(input_frame_.buffer(kVPlane), 0,
memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
// The for loop above did not set the bitrate of the highest layer.
@@ -596,11 +610,11 @@ class TestVp8Simulcast : public ::testing::Test {
half_width = (settings_.width + 1) / 2;
input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width);
memset(input_frame_.buffer(kYPlane), 0,
memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
memset(input_frame_.buffer(kUPlane), 0,
memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
memset(input_frame_.buffer(kVPlane), 0,
memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
}
@@ -691,7 +705,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
CreateImage(&input_frame_, plane_offset);
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
int picture_id = -1;
@@ -707,7 +721,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
CreateImage(&input_frame_, plane_offset);
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -715,7 +729,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
CreateImage(&input_frame_, plane_offset);
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -724,7 +738,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
CreateImage(&input_frame_, plane_offset);
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -739,7 +753,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
CreateImage(&input_frame_, plane_offset);
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
@@ -898,7 +912,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
CreateImage(&input_frame_, plane_offset);
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -906,7 +920,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
CreateImage(&input_frame_, plane_offset);
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));

View File

@@ -752,15 +752,18 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
// Image in vpx_image_t format.
// Input image is const. VP8's raw image is not defined as const.
raw_images_[0].planes[VPX_PLANE_Y] =
const_cast<uint8_t*>(input_image.buffer(kYPlane));
const_cast<uint8_t*>(input_image.video_frame_buffer()->DataY());
raw_images_[0].planes[VPX_PLANE_U] =
const_cast<uint8_t*>(input_image.buffer(kUPlane));
const_cast<uint8_t*>(input_image.video_frame_buffer()->DataU());
raw_images_[0].planes[VPX_PLANE_V] =
const_cast<uint8_t*>(input_image.buffer(kVPlane));
const_cast<uint8_t*>(input_image.video_frame_buffer()->DataV());
raw_images_[0].stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
raw_images_[0].stride[VPX_PLANE_U] = input_image.stride(kUPlane);
raw_images_[0].stride[VPX_PLANE_V] = input_image.stride(kVPlane);
raw_images_[0].stride[VPX_PLANE_Y] =
input_image.video_frame_buffer()->StrideY();
raw_images_[0].stride[VPX_PLANE_U] =
input_image.video_frame_buffer()->StrideU();
raw_images_[0].stride[VPX_PLANE_V] =
input_image.video_frame_buffer()->StrideV();
for (size_t i = 1; i < encoders_.size(); ++i) {
// Scale the image down a number of times by downsampling factor
@@ -1357,9 +1360,12 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
decoded_image.buffer(kYPlane), decoded_image.stride(kYPlane),
decoded_image.buffer(kUPlane), decoded_image.stride(kUPlane),
decoded_image.buffer(kVPlane), decoded_image.stride(kVPlane),
decoded_image.video_frame_buffer()->MutableDataY(),
decoded_image.video_frame_buffer()->StrideY(),
decoded_image.video_frame_buffer()->MutableDataU(),
decoded_image.video_frame_buffer()->StrideU(),
decoded_image.video_frame_buffer()->MutableDataV(),
decoded_image.video_frame_buffer()->StrideV(),
img->d_w, img->d_h);
decoded_image.set_ntp_time_ms(ntp_time_ms);
int ret = decode_complete_callback_->Decoded(decoded_image);