Add alpha channel to VideoFrameBuffer containers
- Add alpha accessors to the PlanarYuvBuffer interface, null by default.
- Add WrapI420ABuffer(), which creates a container that implements these accessors.
- Show their use via StereoDecoderAdapter.

This CL is step 2 of adding alpha channel support over the wire in WebRTC. See
https://webrtc-review.googlesource.com/c/src/+/7800 for the experimental CL that
gives an idea of how it will come together.

Design Doc: https://goo.gl/sFeSUT

Bug: webrtc:7671
Change-Id: Id5691cde00088ec811b63d89080d33ad2d6e3939
Reviewed-on: https://webrtc-review.googlesource.com/21130
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20635}
committed by Commit Bot
parent 66cebbda35
commit 574eaa4cda
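The new API surface exercised here is WrapI420ABuffer() together with the DataA()/StrideA() accessors on I420ABufferInterface. As a minimal sketch of how a caller can attach an alpha plane to an existing I420 buffer, based on the calls visible in the diff that follows; WrapWithDummyAlpha() is a made-up helper name for illustration, not part of this CL:

// Sketch only (assumes the WrapI420ABuffer() signature used in the diff below).
#include "api/video/video_frame_buffer.h"
#include "common_video/include/video_frame_buffer.h"  // WrapI420ABuffer()
#include "rtc_base/keep_ref_until_done.h"

rtc::scoped_refptr<webrtc::I420ABufferInterface> WrapWithDummyAlpha(
    rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer) {
  // No pixels are copied: the wrapper points into the source planes, reusing
  // the Y plane as a stand-in alpha plane (as the unit test below does), and
  // the final callback keeps the source buffer alive until the wrapper dies.
  return webrtc::WrapI420ABuffer(
      yuv_buffer->width(), yuv_buffer->height(),
      yuv_buffer->DataY(), yuv_buffer->StrideY(),
      yuv_buffer->DataU(), yuv_buffer->StrideU(),
      yuv_buffer->DataV(), yuv_buffer->StrideV(),
      yuv_buffer->DataY(), yuv_buffer->StrideY(),
      rtc::KeepRefUntilDone(yuv_buffer));
}

Because the alpha accessors default to null on plain PlanarYuvBuffer implementations, existing I420-only call sites are unaffected.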
@@ -11,6 +11,7 @@
#include "modules/video_coding/codecs/stereo/include/stereo_decoder_adapter.h"

#include "api/video/i420_buffer.h"
#include "api/video/video_frame_buffer.h"
#include "api/video_codecs/sdp_video_format.h"
#include "common_video/include/video_frame.h"
#include "common_video/include/video_frame_buffer.h"
@@ -18,6 +19,11 @@
#include "rtc_base/keep_ref_until_done.h"
#include "rtc_base/logging.h"

namespace {
void KeepBufferRefs(rtc::scoped_refptr<webrtc::VideoFrameBuffer>,
                    rtc::scoped_refptr<webrtc::VideoFrameBuffer>) {}
}  // anonymous namespace

namespace webrtc {

class StereoDecoderAdapter::AdapterDecodedImageCallback
@@ -173,11 +179,22 @@ void StereoDecoderAdapter::MergeAlphaImages(
    VideoFrame* alpha_decodedImage,
    const rtc::Optional<int32_t>& alpha_decode_time_ms,
    const rtc::Optional<uint8_t>& alpha_qp) {
  // TODO(emircan): Merge the output and put in a VideoFrame container that can
  // transport I420A.
  decoded_complete_callback_->Decoded(*decodedImage, decode_time_ms, qp);
  decoded_complete_callback_->Decoded(*alpha_decodedImage, alpha_decode_time_ms,
                                      alpha_qp);
  rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
      decodedImage->video_frame_buffer()->ToI420();
  rtc::scoped_refptr<webrtc::I420BufferInterface> alpha_buffer =
      alpha_decodedImage->video_frame_buffer()->ToI420();
  RTC_DCHECK_EQ(yuv_buffer->width(), alpha_buffer->width());
  RTC_DCHECK_EQ(yuv_buffer->height(), alpha_buffer->height());
  rtc::scoped_refptr<I420ABufferInterface> merged_buffer = WrapI420ABuffer(
      yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
      yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
      yuv_buffer->DataV(), yuv_buffer->StrideV(), alpha_buffer->DataY(),
      alpha_buffer->StrideY(),
      rtc::Bind(&KeepBufferRefs, yuv_buffer, alpha_buffer));

  VideoFrame merged_image(merged_buffer, decodedImage->timestamp(),
                          0 /* render_time_ms */, decodedImage->rotation());
  decoded_complete_callback_->Decoded(merged_image, decode_time_ms, qp);
}

}  // namespace webrtc

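The encoder adapter changes below consume such frames: they check the buffer type and only reach for the alpha plane when it is kI420A. A short sketch of that read-side pattern, mirroring the hunk that follows; AlphaPlaneOrNull() is an invented name for illustration:

// Sketch: detect and access the alpha plane through the new accessors.
#include "api/video/video_frame_buffer.h"

const uint8_t* AlphaPlaneOrNull(
    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
  if (buffer->type() != webrtc::VideoFrameBuffer::Type::kI420A)
    return nullptr;  // Plain I420: no alpha channel attached.
  const webrtc::I420ABufferInterface* yuva = buffer->GetI420A();
  // DataA()/StrideA() describe an alpha plane with the same dimensions as Y.
  return yuva->DataA();
}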
@@ -83,28 +83,30 @@ int StereoEncoderAdapter::Encode(const VideoFrame& input_image,
  if (!encoded_complete_callback_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // TODO(emircan): Extract alpha and create an alpha frame with dummy planes.
  // Since we don't have a way of transporting alpha yet, put a dummy output for
  // alpha consisting of YXX.

  // Encode AXX
  rtc::scoped_refptr<I420BufferInterface> yuva_buffer =
      input_image.video_frame_buffer()->ToI420();
  rtc::scoped_refptr<WrappedI420Buffer> alpha_buffer(
      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
          input_image.width(), input_image.height(), yuva_buffer->DataY(),
          yuva_buffer->StrideY(), stereo_dummy_planes_.data(),
          yuva_buffer->StrideU(), stereo_dummy_planes_.data(),
          yuva_buffer->StrideV(),
          rtc::KeepRefUntilDone(input_image.video_frame_buffer())));
  VideoFrame alpha_image(alpha_buffer, input_image.timestamp(),
                         input_image.render_time_ms(), input_image.rotation());
  encoders_[kAXXStream]->Encode(alpha_image, codec_specific_info, frame_types);

  // Encode YUV
  int rv = encoders_[kYUVStream]->Encode(input_image, codec_specific_info,
                                         frame_types);
  if (rv)
    return rv;

  const bool has_alpha = input_image.video_frame_buffer()->type() ==
                         VideoFrameBuffer::Type::kI420A;
  if (!has_alpha)
    return rv;

  // Encode AXX
  const I420ABufferInterface* yuva_buffer =
      input_image.video_frame_buffer()->GetI420A();
  rtc::scoped_refptr<I420BufferInterface> alpha_buffer =
      WrapI420Buffer(input_image.width(), input_image.height(),
                     yuva_buffer->DataA(), yuva_buffer->StrideA(),
                     stereo_dummy_planes_.data(), yuva_buffer->StrideU(),
                     stereo_dummy_planes_.data(), yuva_buffer->StrideV(),
                     rtc::KeepRefUntilDone(input_image.video_frame_buffer()));
  VideoFrame alpha_image(alpha_buffer, input_image.timestamp(),
                         input_image.render_time_ms(), input_image.rotation());
  rv = encoders_[kAXXStream]->Encode(alpha_image, codec_specific_info,
                                     frame_types);
  return rv;
}

@@ -158,6 +160,9 @@ EncodedImageCallback::Result StereoEncoderAdapter::OnEncodedImage(
    const EncodedImage& encodedImage,
    const CodecSpecificInfo* codecSpecificInfo,
    const RTPFragmentationHeader* fragmentation) {
  if (stream_idx == kAXXStream)
    return EncodedImageCallback::Result(EncodedImageCallback::Result::OK);

  // TODO(emircan): Fill |codec_specific_info| with stereo parameters.
  encoded_complete_callback_->OnEncodedImage(encodedImage, codecSpecificInfo,
                                             fragmentation);

@@ -10,11 +10,14 @@

#include "api/test/mock_video_decoder_factory.h"
#include "api/test/mock_video_encoder_factory.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/codecs/stereo/include/stereo_decoder_adapter.h"
#include "modules/video_coding/codecs/stereo/include/stereo_encoder_adapter.h"
#include "modules/video_coding/codecs/test/video_codec_test.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/keep_ref_until_done.h"
#include "rtc_base/ptr_util.h"

using testing::_;
using testing::Return;
@@ -44,6 +47,18 @@ class TestStereoAdapter : public VideoCodecTest {
    return codec_settings;
  }

  std::unique_ptr<VideoFrame> CreateI420AInputFrame() {
    rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
        input_frame_->video_frame_buffer()->ToI420();
    rtc::scoped_refptr<I420ABufferInterface> yuva_buffer = WrapI420ABuffer(
        yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
        yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
        yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
        yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer));
    return rtc::WrapUnique<VideoFrame>(
        new VideoFrame(yuva_buffer, kVideoRotation_0, 0));
  }

 private:
  void SetUp() override {
    EXPECT_CALL(*decoder_factory_, Die());
@@ -93,4 +108,20 @@ TEST_F(TestStereoAdapter, EncodeDecodeI420Frame) {
  EXPECT_GT(I420PSNR(input_frame_.get(), decoded_frame.get()), 36);
}

TEST_F(TestStereoAdapter, EncodeDecodeI420AFrame) {
  std::unique_ptr<VideoFrame> yuva_frame = CreateI420AInputFrame();
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->Encode(*yuva_frame, nullptr, nullptr));
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            decoder_->Decode(encoded_frame, false, nullptr));
  std::unique_ptr<VideoFrame> decoded_frame;
  rtc::Optional<uint8_t> decoded_qp;
  ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
  ASSERT_TRUE(decoded_frame);
  EXPECT_GT(I420PSNR(yuva_frame.get(), decoded_frame.get()), 36);
}

}  // namespace webrtc