Revert "Revert "Update video_coding/codecs to new VideoFrameBuffer interface""
This reverts commit 88f94fa36aa61f7904d30251205c544ada2c4301.

Chromium code has been updated.

Original change's description:
> Revert "Update video_coding/codecs to new VideoFrameBuffer interface"
>
> This reverts commit 20ebf4ede803cd4f628ef9378700f60b72f2eab0.
>
> Reason for revert:
>
> Suspect of breaking FYI bots.
> See https://build.chromium.org/p/chromium.webrtc.fyi/builders/Win7%20Tester/builds/9036 and others.
>
> Sample logs:
> Backtrace:
> [5024:1036:0607/173649.857:FATAL:webrtc_video_frame_adapter.cc(98)] Check failed: false.
> Backtrace:
> base::debug::StackTrace::StackTrace [0x02D04A37+55]
> base::debug::StackTrace::StackTrace [0x02CCBB8A+10]
> content::WebRtcVideoFrameAdapter::NativeToI420Buffer [0x0508AD71+305]
> webrtc::VideoFrameBuffer::ToI420 [0x0230BF67+39]
> webrtc::H264EncoderImpl::Encode [0x057E8D0B+267]
> webrtc::VCMGenericEncoder::Encode [0x057E0E34+333]
> webrtc::vcm::VideoSender::AddVideoFrame [0x057DED9B+796]
> webrtc::ViEEncoder::EncodeVideoFrame [0x057C00F6+884]
> webrtc::ViEEncoder::EncodeTask::Run [0x057C12D7+215]
> rtc::TaskQueue::PostTask [0x03EE5CFB+194]
> base::internal::Invoker<base::internal::BindState<enum extensions::`anonymous namespace'::VerificationResult (__cdecl*)(std::unique_ptr<extensions::NetworkingCastPrivateDelegate::Credentials,std::default_delete<extensions::NetworkingCastPrivateDelegate::C [0x02DDCAA5+31]
> base::internal::Invoker<base::internal::BindState<enum extensions::`anonymous namespace'::VerificationResult (__cdecl*)(std::unique_ptr<extensions::NetworkingCastPrivateDelegate::Credentials,std::default_delete<extensions::NetworkingCastPrivateDelegate::C [0x02DDEE86+22]
> base::debug::TaskAnnotator::RunTask [0x02D08289+409]
> base::MessageLoop::RunTask [0x02C8CEC1+1233]
> base::MessageLoop::DoWork [0x02C8C1AD+765]
> base::MessagePumpDefault::Run [0x02D0A20B+219]
> base::MessageLoop::Run [0x02C8C9DB+107]
> base::RunLoop::Run [0x02C89583+147]
> base::Thread::Run [0x02CBEFCD+173]
> base::Thread::ThreadMain [0x02CBFADE+622]
> base::PlatformThread::Sleep [0x02C9E1A2+290]
> BaseThreadInitThunk [0x75C3338A+18]
> RtlInitializeExceptionChain [0x773A9902+99]
> RtlInitializeExceptionChain [0x773A98D5+54]
>
> Original change's description:
> > Update video_coding/codecs to new VideoFrameBuffer interface
> >
> > This is a follow-up cleanup for CL
> > https://codereview.webrtc.org/2847383002/.
> >
> > Bug: webrtc:7632
> > Change-Id: I47861d779968f2fee94db9c017102a8e87e67fb7
> > Reviewed-on: https://chromium-review.googlesource.com/524163
> > Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
> > Reviewed-by: Niels Moller <nisse@webrtc.org>
> > Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
> > Cr-Commit-Position: refs/heads/master@{#18477}
>
> TBR=magjed@webrtc.org,nisse@webrtc.org,brandtr@webrtc.org
> No-Presubmit: true
> No-Tree-Checks: true
> No-Try: true
> Bug: webrtc:7632
>
> Change-Id: I3b73fc7d16ff19ceba196e964dcb36a36510912c
> Reviewed-on: https://chromium-review.googlesource.com/527793
> Reviewed-by: Guido Urdaneta <guidou@chromium.org>
> Commit-Queue: Guido Urdaneta <guidou@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#18489}

TBR=tterriberry@mozilla.com,mflodman@webrtc.org,magjed@webrtc.org,stefan@webrtc.org,guidou@chromium.org,nisse@webrtc.org,brandtr@webrtc.org,webrtc-reviews@webrtc.org

# Not skipping CQ checks because original CL landed > 1 day ago.
No-Presubmit: true
Bug: webrtc:7632
Change-Id: I0962a704e8a9939d4364ce9069c863c9951654c9
Reviewed-on: https://chromium-review.googlesource.com/530684
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18527}
Committed by: Commit Bot
Parent: 29584c50c3
Commit: 72dbe2a211
@@ -134,7 +134,7 @@ int H264DecoderImpl::AVGetBuffer2(
       decoder->pool_.CreateBuffer(width, height);

   int y_size = width * height;
-  int uv_size = ((width + 1) / 2) * ((height + 1) / 2);
+  int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight();
   // DCHECK that we have a continuous buffer as is required.
   RTC_DCHECK_EQ(frame_buffer->DataU(), frame_buffer->DataY() + y_size);
   RTC_DCHECK_EQ(frame_buffer->DataV(), frame_buffer->DataU() + uv_size);
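Note: the ChromaWidth()/ChromaHeight() accessors replace the hand-rolled (width + 1) / 2 rounding. A minimal standalone sketch of the plane-size arithmetic that the RTC_DCHECK_EQ calls above rely on (plain C++, no WebRTC dependencies; the I420Layout helper is illustrative, not a WebRTC type):

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative I420 layout math: for odd dimensions the chroma planes round
// up, matching what ChromaWidth()/ChromaHeight() report for an I420 buffer.
struct I420Layout {
  int width;
  int height;
  int ChromaWidth() const { return (width + 1) / 2; }
  int ChromaHeight() const { return (height + 1) / 2; }
  size_t YSize() const { return static_cast<size_t>(width) * height; }
  size_t UvSize() const {
    return static_cast<size_t>(ChromaWidth()) * ChromaHeight();
  }
};

int main() {
  I420Layout layout{321, 241};
  // In a contiguous I420 allocation, U starts right after Y and V right after
  // U -- the invariant the DCHECKs in the hunk above assert.
  std::vector<uint8_t> data(layout.YSize() + 2 * layout.UvSize());
  const uint8_t* y = data.data();
  const uint8_t* u = y + layout.YSize();
  const uint8_t* v = u + layout.UvSize();
  assert(u == data.data() + 321 * 241);
  assert(v == u + 161 * 121);  // (321+1)/2 == 161, (241+1)/2 == 121
  return 0;
}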
@@ -349,12 +349,11 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
   VideoFrame* video_frame = static_cast<VideoFrame*>(
       av_buffer_get_opaque(av_frame_->buf[0]));
   RTC_DCHECK(video_frame);
-  RTC_CHECK_EQ(av_frame_->data[kYPlaneIndex],
-               video_frame->video_frame_buffer()->DataY());
-  RTC_CHECK_EQ(av_frame_->data[kUPlaneIndex],
-               video_frame->video_frame_buffer()->DataU());
-  RTC_CHECK_EQ(av_frame_->data[kVPlaneIndex],
-               video_frame->video_frame_buffer()->DataV());
+  rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer =
+      video_frame->video_frame_buffer()->GetI420();
+  RTC_CHECK_EQ(av_frame_->data[kYPlaneIndex], i420_buffer->DataY());
+  RTC_CHECK_EQ(av_frame_->data[kUPlaneIndex], i420_buffer->DataU());
+  RTC_CHECK_EQ(av_frame_->data[kVPlaneIndex], i420_buffer->DataV());
   video_frame->set_timestamp(input_image._timeStamp);

   rtc::Optional<uint8_t> qp;
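Note: the decoder can use GetI420() here because its own frame pool only hands out I420 buffers, so no conversion is needed; the encoder hunks further down use ToI420(), which converts when the underlying buffer is not already I420. A simplified mock of that split, assuming this reading of the interface (the class names mirror the diff, but the hierarchy is illustrative, not the real WebRTC one):

#include <cassert>
#include <memory>

struct I420Planes {
  int width = 0;
  int height = 0;
};

class FrameBuffer {
 public:
  enum class Type { kI420, kNative };
  virtual ~FrameBuffer() = default;
  virtual Type type() const = 0;
  // Always usable: converts to I420 when the backing store is something else.
  virtual std::shared_ptr<const I420Planes> ToI420() const = 0;
  // Cheap accessor: only meaningful when the buffer is already I420, which is
  // what the decoder's frame pool guarantees in the hunk above.
  std::shared_ptr<const I420Planes> GetI420() const {
    assert(type() == Type::kI420);
    return ToI420();
  }
};

class PooledI420Buffer : public FrameBuffer {
 public:
  PooledI420Buffer(int width, int height)
      : planes_(std::make_shared<I420Planes>(I420Planes{width, height})) {}
  Type type() const override { return Type::kI420; }
  std::shared_ptr<const I420Planes> ToI420() const override { return planes_; }

 private:
  std::shared_ptr<const I420Planes> planes_;
};

int main() {
  PooledI420Buffer buffer(640, 480);
  auto i420 = buffer.GetI420();  // decoder-style access: no conversion expected
  assert(i420->width == 640 && i420->height == 480);
  return 0;
}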
@@ -369,15 +368,15 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
   // The decoded image may be larger than what is supposed to be visible, see
   // |AVGetBuffer2|'s use of |avcodec_align_dimensions|. This crops the image
   // without copying the underlying buffer.
-  rtc::scoped_refptr<VideoFrameBuffer> buf = video_frame->video_frame_buffer();
-  if (av_frame_->width != buf->width() || av_frame_->height != buf->height()) {
+  if (av_frame_->width != i420_buffer->width() ||
+      av_frame_->height != i420_buffer->height()) {
     rtc::scoped_refptr<VideoFrameBuffer> cropped_buf(
         new rtc::RefCountedObject<WrappedI420Buffer>(
             av_frame_->width, av_frame_->height,
-            buf->DataY(), buf->StrideY(),
-            buf->DataU(), buf->StrideU(),
-            buf->DataV(), buf->StrideV(),
-            rtc::KeepRefUntilDone(buf)));
+            i420_buffer->DataY(), i420_buffer->StrideY(),
+            i420_buffer->DataU(), i420_buffer->StrideU(),
+            i420_buffer->DataV(), i420_buffer->StrideV(),
+            rtc::KeepRefUntilDone(i420_buffer)));
     VideoFrame cropped_frame(
         cropped_buf, video_frame->timestamp(), video_frame->render_time_ms(),
         video_frame->rotation());
@@ -332,8 +332,8 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
     // (If every frame is a key frame we get lag/delays.)
     openh264_encoder_->ForceIntraFrame(true);
   }
-  rtc::scoped_refptr<const VideoFrameBuffer> frame_buffer =
-      input_frame.video_frame_buffer();
+  rtc::scoped_refptr<const I420BufferInterface> frame_buffer =
+      input_frame.video_frame_buffer()->ToI420();
   // EncodeFrame input.
   SSourcePicture picture;
   memset(&picture, 0, sizeof(SSourcePicture));
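Note: once ToI420() has produced a planar buffer, the encoder can hand the plane pointers straight to OpenH264. A rough sketch of that wiring, assuming OpenH264's SSourcePicture layout from codec_app_def.h; the I420View struct is a stand-in for the converted frame_buffer above, so treat the whole function as illustrative rather than the actual encoder code:

#include <cstring>
#include <wels/codec_api.h>  // OpenH264: SSourcePicture, videoFormatI420

// Stand-in for webrtc::I420BufferInterface plane accessors.
struct I420View {
  const unsigned char* data_y; int stride_y;
  const unsigned char* data_u; int stride_u;
  const unsigned char* data_v; int stride_v;
  int width;
  int height;
};

SSourcePicture MakeSourcePicture(const I420View& buf, long long timestamp_ms) {
  SSourcePicture picture;
  std::memset(&picture, 0, sizeof(picture));
  picture.iPicWidth = buf.width;
  picture.iPicHeight = buf.height;
  picture.iColorFormat = videoFormatI420;
  picture.uiTimeStamp = timestamp_ms;
  picture.iStride[0] = buf.stride_y;
  picture.iStride[1] = buf.stride_u;
  picture.iStride[2] = buf.stride_v;
  // OpenH264 reads but does not modify the input planes; the const_cast
  // mirrors what a real wrapper has to do with a const buffer.
  picture.pData[0] = const_cast<unsigned char*>(buf.data_y);
  picture.pData[1] = const_cast<unsigned char*>(buf.data_u);
  picture.pData[2] = const_cast<unsigned char*>(buf.data_v);
  return picture;
}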
@@ -200,9 +200,8 @@ int I420Decoder::Decode(const EncodedImage& inputImage,
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
   // Set decoded image parameters.
-  int half_width = (_width + 1) / 2;
   rtc::scoped_refptr<webrtc::I420Buffer> frame_buffer =
-      I420Buffer::Create(_width, _height, _width, half_width, half_width);
+      I420Buffer::Create(_width, _height);

   // Converting from raw buffer I420Buffer.
   int ret = ConvertToI420(VideoType::kI420, buffer, 0, 0, _width, _height, 0,
@@ -335,28 +335,25 @@ int SimulcastEncoderAdapter::Encode(
     // TODO(perkj): ensure that works going forward, and figure out how this
     // affects webrtc:5683.
     if ((dst_width == src_width && dst_height == src_height) ||
-        input_image.video_frame_buffer()->native_handle()) {
+        input_image.video_frame_buffer()->type() ==
+            VideoFrameBuffer::Type::kNative) {
       int ret = streaminfos_[stream_idx].encoder->Encode(
           input_image, codec_specific_info, &stream_frame_types);
       if (ret != WEBRTC_VIDEO_CODEC_OK) {
         return ret;
       }
     } else {
-      // Aligning stride values based on width.
       rtc::scoped_refptr<I420Buffer> dst_buffer =
-          I420Buffer::Create(dst_width, dst_height, dst_width,
-                             (dst_width + 1) / 2, (dst_width + 1) / 2);
-      libyuv::I420Scale(input_image.video_frame_buffer()->DataY(),
-                        input_image.video_frame_buffer()->StrideY(),
-                        input_image.video_frame_buffer()->DataU(),
-                        input_image.video_frame_buffer()->StrideU(),
-                        input_image.video_frame_buffer()->DataV(),
-                        input_image.video_frame_buffer()->StrideV(),
-                        src_width, src_height,
-                        dst_buffer->MutableDataY(), dst_buffer->StrideY(),
-                        dst_buffer->MutableDataU(), dst_buffer->StrideU(),
-                        dst_buffer->MutableDataV(), dst_buffer->StrideV(),
-                        dst_width, dst_height,
+          I420Buffer::Create(dst_width, dst_height);
+      rtc::scoped_refptr<I420BufferInterface> src_buffer =
+          input_image.video_frame_buffer()->ToI420();
+      libyuv::I420Scale(src_buffer->DataY(), src_buffer->StrideY(),
+                        src_buffer->DataU(), src_buffer->StrideU(),
+                        src_buffer->DataV(), src_buffer->StrideV(), src_width,
+                        src_height, dst_buffer->MutableDataY(),
+                        dst_buffer->StrideY(), dst_buffer->MutableDataU(),
+                        dst_buffer->StrideU(), dst_buffer->MutableDataV(),
+                        dst_buffer->StrideV(), dst_width, dst_height,
                         libyuv::kFilterBilinear);

       int ret = streaminfos_[stream_idx].encoder->Encode(
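Note: the scaling path now checks the buffer type instead of a native handle, converts to I420 once, and lets I420Buffer::Create() pick its own strides. A standalone sketch of the same libyuv call on raw planes, assuming libyuv's scale.h; the tightly packed buffer setup here is illustrative:

#include <cstdint>
#include <vector>
#include "libyuv/scale.h"

int main() {
  const int src_w = 640, src_h = 480;
  const int dst_w = 320, dst_h = 240;
  // Tightly packed I420: stride == width for Y, width / 2 for U and V
  // (even dimensions chosen here so the halving is exact).
  std::vector<uint8_t> src_y(src_w * src_h, 128);
  std::vector<uint8_t> src_u(src_w / 2 * src_h / 2, 128);
  std::vector<uint8_t> src_v(src_w / 2 * src_h / 2, 128);
  std::vector<uint8_t> dst_y(dst_w * dst_h);
  std::vector<uint8_t> dst_u(dst_w / 2 * dst_h / 2);
  std::vector<uint8_t> dst_v(dst_w / 2 * dst_h / 2);

  // Same call shape as the adapter above: source planes/strides, source size,
  // destination planes/strides, destination size, filter mode.
  libyuv::I420Scale(src_y.data(), src_w, src_u.data(), src_w / 2, src_v.data(),
                    src_w / 2, src_w, src_h, dst_y.data(), dst_w, dst_u.data(),
                    dst_w / 2, dst_v.data(), dst_w / 2, dst_w, dst_h,
                    libyuv::kFilterBilinear);
  return 0;
}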
@@ -717,14 +717,22 @@ TEST_F(TestSimulcastEncoderAdapterFake,
 }

 // TODO(nisse): Reuse definition in webrtc/test/fake_texture_handle.h.
-class FakeNativeHandleBuffer : public NativeHandleBuffer {
+class FakeNativeBuffer : public VideoFrameBuffer {
  public:
-  FakeNativeHandleBuffer(void* native_handle, int width, int height)
-      : NativeHandleBuffer(native_handle, width, height) {}
-  rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override {
+  FakeNativeBuffer(int width, int height) : width_(width), height_(height) {}
+
+  Type type() const override { return Type::kNative; }
+  int width() const override { return width_; }
+  int height() const override { return height_; }
+
+  rtc::scoped_refptr<I420BufferInterface> ToI420() override {
     RTC_NOTREACHED();
     return nullptr;
   }
+
+ private:
+  const int width_;
+  const int height_;
 };

 TEST_F(TestSimulcastEncoderAdapterFake,
@@ -743,7 +751,7 @@ TEST_F(TestSimulcastEncoderAdapterFake,
   EXPECT_TRUE(adapter_->SupportsNativeHandle());

   rtc::scoped_refptr<VideoFrameBuffer> buffer(
-      new rtc::RefCountedObject<FakeNativeHandleBuffer>(this, 1280, 720));
+      new rtc::RefCountedObject<FakeNativeBuffer>(1280, 720));
   VideoFrame input_frame(buffer, 100, 1000, kVideoRotation_180);
   // Expect calls with the given video frame verbatim, since it's a texture
   // frame and can't otherwise be modified/resized.
@@ -766,9 +774,8 @@ TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) {
       .WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));

   // Send a fake frame and assert the return is software fallback.
-  int half_width = (kDefaultWidth + 1) / 2;
-  rtc::scoped_refptr<I420Buffer> input_buffer = I420Buffer::Create(
-      kDefaultWidth, kDefaultHeight, kDefaultWidth, half_width, half_width);
+  rtc::scoped_refptr<I420Buffer> input_buffer =
+      I420Buffer::Create(kDefaultWidth, kDefaultHeight);
   input_buffer->InitializeData();
   VideoFrame input_frame(input_buffer, 0, 0, webrtc::kVideoRotation_0);
   std::vector<FrameType> frame_types(3, kVideoFrameKey);
@@ -129,14 +129,16 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
  public:
   Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
   int32_t Decoded(VideoFrame& decoded_image) override {
+    rtc::scoped_refptr<I420BufferInterface> i420_buffer =
+        decoded_image.video_frame_buffer()->ToI420();
     for (int i = 0; i < decoded_image.width(); ++i) {
-      EXPECT_NEAR(kColorY, decoded_image.video_frame_buffer()->DataY()[i], 1);
+      EXPECT_NEAR(kColorY, i420_buffer->DataY()[i], 1);
     }

     // TODO(mikhal): Verify the difference between U,V and the original.
-    for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) {
-      EXPECT_NEAR(kColorU, decoded_image.video_frame_buffer()->DataU()[i], 4);
-      EXPECT_NEAR(kColorV, decoded_image.video_frame_buffer()->DataV()[i], 4);
+    for (int i = 0; i < i420_buffer->ChromaWidth(); ++i) {
+      EXPECT_NEAR(kColorU, i420_buffer->DataU()[i], 4);
+      EXPECT_NEAR(kColorV, i420_buffer->DataV()[i], 4);
     }
     decoded_frames_++;
     return 0;
@@ -178,21 +180,14 @@ class TestVp8Simulcast : public ::testing::Test {
   // Fills in an I420Buffer from |plane_colors|.
   static void CreateImage(const rtc::scoped_refptr<I420Buffer>& buffer,
                           int plane_colors[kNumOfPlanes]) {
-    int width = buffer->width();
-    int height = buffer->height();
-    int chroma_width = (width + 1) / 2;
-    int chroma_height = (height + 1) / 2;
+    SetPlane(buffer->MutableDataY(), plane_colors[0], buffer->width(),
+             buffer->height(), buffer->StrideY());

-    SetPlane(buffer->MutableDataY(), plane_colors[0],
-             width, height, buffer->StrideY());
+    SetPlane(buffer->MutableDataU(), plane_colors[1], buffer->ChromaWidth(),
+             buffer->ChromaHeight(), buffer->StrideU());

-    SetPlane(buffer->MutableDataU(), plane_colors[1],
-             chroma_width, chroma_height,
-             buffer->StrideU());
-
-    SetPlane(buffer->MutableDataV(), plane_colors[2],
-             chroma_width, chroma_height,
-             buffer->StrideV());
+    SetPlane(buffer->MutableDataV(), plane_colors[2], buffer->ChromaWidth(),
+             buffer->ChromaHeight(), buffer->StrideV());
   }

   static void DefaultSettings(VideoCodec* settings,
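Note: CreateImage now derives the chroma dimensions from the buffer instead of recomputing them. SetPlane is a helper defined elsewhere in this test file; a plausible standalone equivalent (illustrative, not the actual helper) just fills one plane row by row while honouring the stride:

#include <cstdint>
#include <cstring>
#include <vector>

// Fill a width x height plane with 'value'; rows are 'stride' bytes apart,
// so padding bytes beyond 'width' are left untouched.
static void FillPlane(uint8_t* data, int value, int width, int height,
                      int stride) {
  for (int row = 0; row < height; ++row) {
    std::memset(data + row * stride, value, width);
  }
}

int main() {
  std::vector<uint8_t> plane(64 * 32);
  FillPlane(plane.data(), 128, 60, 32, 64);  // 60-wide image in a 64-stride buffer
  return 0;
}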
@@ -260,9 +255,7 @@ class TestVp8Simulcast : public ::testing::Test {
     SetUpRateAllocator();
     EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
     EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
-    int half_width = (kDefaultWidth + 1) / 2;
-    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight,
-                                       kDefaultWidth, half_width, half_width);
+    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight);
     input_buffer_->InitializeData();
     input_frame_.reset(
         new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
@@ -513,9 +506,7 @@ class TestVp8Simulcast : public ::testing::Test {
       settings_.simulcastStream[i].height = settings_.height;
     }
     // Setting input image to new resolution.
-    int half_width = (settings_.width + 1) / 2;
-    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height,
-                                       settings_.width, half_width, half_width);
+    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height);
     input_buffer_->InitializeData();

     input_frame_.reset(
@@ -556,9 +547,7 @@ class TestVp8Simulcast : public ::testing::Test {
     SetRates(settings_.startBitrate, 30);
     ExpectStreams(kVideoFrameKey, 1);
     // Resize |input_frame_| to the new resolution.
-    half_width = (settings_.width + 1) / 2;
-    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height,
-                                       settings_.width, half_width, half_width);
+    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height);
     input_buffer_->InitializeData();
     input_frame_.reset(
         new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
@@ -668,7 +668,8 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
   if (encoded_complete_callback_ == NULL)
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;

-  rtc::scoped_refptr<VideoFrameBuffer> input_image = frame.video_frame_buffer();
+  rtc::scoped_refptr<I420BufferInterface> input_image =
+      frame.video_frame_buffer()->ToI420();
   // Since we are extracting raw pointers from |input_image| to
   // |raw_images_[0]|, the resolution of these frames must match.
   RTC_DCHECK_EQ(input_image->width(), raw_images_[0].d_w);
@@ -507,17 +507,16 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
   // doing this.
   input_image_ = &input_image;

+  rtc::scoped_refptr<I420BufferInterface> i420_buffer =
+      input_image.video_frame_buffer()->ToI420();
   // Image in vpx_image_t format.
   // Input image is const. VPX's raw image is not defined as const.
-  raw_->planes[VPX_PLANE_Y] =
-      const_cast<uint8_t*>(input_image.video_frame_buffer()->DataY());
-  raw_->planes[VPX_PLANE_U] =
-      const_cast<uint8_t*>(input_image.video_frame_buffer()->DataU());
-  raw_->planes[VPX_PLANE_V] =
-      const_cast<uint8_t*>(input_image.video_frame_buffer()->DataV());
-  raw_->stride[VPX_PLANE_Y] = input_image.video_frame_buffer()->StrideY();
-  raw_->stride[VPX_PLANE_U] = input_image.video_frame_buffer()->StrideU();
-  raw_->stride[VPX_PLANE_V] = input_image.video_frame_buffer()->StrideV();
+  raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(i420_buffer->DataY());
+  raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(i420_buffer->DataU());
+  raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(i420_buffer->DataV());
+  raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY();
+  raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU();
+  raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();

   vpx_enc_frame_flags_t flags = 0;
   bool send_keyframe = (frame_type == kVideoFrameKey);
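Note: the VP9 (and VP8) encoders point the preallocated vpx_image_t at the converted I420 planes instead of copying them. A rough standalone sketch of the same zero-copy wiring, assuming libvpx's vpx/vpx_image.h; the plane allocation here is illustrative:

#include <cstdint>
#include <vector>
#include "vpx/vpx_image.h"

int main() {
  const int width = 320, height = 240;
  std::vector<uint8_t> y(width * height);
  std::vector<uint8_t> u(width / 2 * height / 2);
  std::vector<uint8_t> v(width / 2 * height / 2);

  // Wrap caller-owned memory; the explicit plane/stride assignments below
  // then override what vpx_img_wrap derived, which is what the encoder does.
  vpx_image_t img;
  vpx_img_wrap(&img, VPX_IMG_FMT_I420, width, height, 1, y.data());
  img.planes[VPX_PLANE_Y] = y.data();
  img.planes[VPX_PLANE_U] = u.data();
  img.planes[VPX_PLANE_V] = v.data();
  img.stride[VPX_PLANE_Y] = width;
  img.stride[VPX_PLANE_U] = width / 2;
  img.stride[VPX_PLANE_V] = width / 2;
  return 0;
}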