Add Profile 2 configuration to VP9 Encoder and Decoder
Bug: webrtc:9376
Change-Id: I4f627fb2b6c146a90cfcaa815da459b09dc00003
Reviewed-on: https://webrtc-review.googlesource.com/81980
Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
Reviewed-by: Niklas Enbom <niklas.enbom@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Reviewed-by: Jerome Jiang <jianj@google.com>
Cr-Commit-Position: refs/heads/master@{#23917}
Commit: fc9c4e88b5 (committed by Commit Bot)
Parent: 13f4c896d5
@@ -57,6 +57,7 @@ class VideoQualityTestFixtureInterface {
       bool automatic_scaling;
       std::string clip_name;  // "Generator" to generate frames instead.
       size_t capture_device_index;
+      SdpVideoFormat::Parameters sdp_params;
     } video[2];
     struct Audio {
       bool enabled;
@@ -44,7 +44,7 @@ std::unique_ptr<VideoEncoder> InternalEncoderFactory::CreateVideoEncoder(
   if (cricket::CodecNamesEq(format.name, cricket::kVp8CodecName))
     return VP8Encoder::Create();
   if (cricket::CodecNamesEq(format.name, cricket::kVp9CodecName))
-    return VP9Encoder::Create();
+    return VP9Encoder::Create(cricket::VideoCodec(format));
   if (cricket::CodecNamesEq(format.name, cricket::kH264CodecName))
     return H264Encoder::Create(cricket::VideoCodec(format));
   RTC_LOG(LS_ERROR) << "Trying to created encoder of unsupported format "
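For illustration, a minimal sketch (not part of this change) of how an application could ask this factory for a Profile 2 VP9 encoder now that the SDP parameters are forwarded. kVP9FmtpProfileId, VP9ProfileToString and VP9Profile::kProfile2 come from media/base/vp9_profile.h as used elsewhere in this change; the helper name and the exact header paths are assumptions.

// Sketch only: request a VP9 Profile 2 encoder via SdpVideoFormat parameters.
#include <memory>

#include "api/video_codecs/sdp_video_format.h"
#include "media/base/mediaconstants.h"          // cricket::kVp9CodecName (assumed path)
#include "media/base/vp9_profile.h"
#include "media/engine/internalencoderfactory.h"  // assumed path

std::unique_ptr<webrtc::VideoEncoder> CreateVp9Profile2Encoder() {
  webrtc::SdpVideoFormat format(
      cricket::kVp9CodecName,
      {{webrtc::kVP9FmtpProfileId,
        webrtc::VP9ProfileToString(webrtc::VP9Profile::kProfile2)}});
  webrtc::InternalEncoderFactory factory;
  // With this change the factory passes |format| on as a cricket::VideoCodec,
  // so VP9EncoderImpl can read the profile-id fmtp and select profile 2.
  return factory.CreateVideoEncoder(format);
}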
@@ -479,6 +479,7 @@ rtc_static_library("webrtc_vp9") {
     ":webrtc_vp9_helpers",
     "..:module_api",
     "../..:webrtc_common",
+    "../../api/video:video_frame_i010",
     "../../api/video_codecs:video_codecs_api",
     "../../common_video",
     "../../media:rtc_media_base",
@@ -724,6 +725,7 @@ if (rtc_include_tests) {
       "../../media:rtc_h264_profile_id",
       "../../media:rtc_internal_video_codecs",
       "../../media:rtc_media_base",
+      "../../media:rtc_vp9_profile",
       "../../rtc_base:rtc_base",
       "../../test:fileutils",
       "../../test:test_common",
@@ -732,6 +734,7 @@ if (rtc_include_tests) {
       "../rtp_rtcp:rtp_rtcp_format",
       "//third_party/abseil-cpp/absl/memory",
       "//third_party/abseil-cpp/absl/types:optional",
+      "//third_party/libyuv",
     ]

     data = video_coding_modules_tests_resources
@@ -69,8 +69,7 @@ void VideoCodecUnitTest::SetUp() {

   input_frame_generator_ = test::FrameGenerator::CreateSquareGenerator(
       codec_settings_.width, codec_settings_.height,
-      absl::optional<test::FrameGenerator::OutputType>(),
-      absl::optional<int>());
+      test::FrameGenerator::OutputType::I420, absl::optional<int>());

   encoder_ = CreateEncoder();
   decoder_ = CreateDecoder();
@@ -105,6 +105,7 @@ class VideoCodecUnitTest : public ::testing::Test {

   std::unique_ptr<VideoEncoder> encoder_;
   std::unique_ptr<VideoDecoder> decoder_;
+  std::unique_ptr<test::FrameGenerator> input_frame_generator_;

  private:
   FakeEncodeCompleteCallback encode_complete_callback_;
@@ -124,7 +125,6 @@ class VideoCodecUnitTest : public ::testing::Test {
       RTC_GUARDED_BY(decoded_frame_section_);
   absl::optional<uint8_t> decoded_qp_ RTC_GUARDED_BY(decoded_frame_section_);

-  std::unique_ptr<test::FrameGenerator> input_frame_generator_;
   uint32_t last_input_frame_timestamp_;
 };

@@ -10,11 +10,13 @@

 #include "api/video/i420_buffer.h"
 #include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "media/base/vp9_profile.h"
 #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
 #include "modules/video_coding/codecs/test/video_codec_unittest.h"
 #include "modules/video_coding/codecs/vp9/include/vp9.h"
 #include "modules/video_coding/codecs/vp9/svc_config.h"
 #include "test/video_codec_settings.h"
+#include "third_party/libyuv/include/libyuv/convert.h"

 namespace webrtc {

@@ -460,4 +462,54 @@ TEST_F(TestVp9ImplFrameDropping, PreEncodeFrameDropping) {
               max_abs_framerate_error_fps);
 }

+class TestVp9ImplProfile2 : public TestVp9Impl {
+ protected:
+  void SetUp() override {
+    TestVp9Impl::SetUp();
+    input_frame_generator_ = test::FrameGenerator::CreateSquareGenerator(
+        codec_settings_.width, codec_settings_.height,
+        test::FrameGenerator::OutputType::I010, absl::optional<int>());
+  }
+
+  std::unique_ptr<VideoEncoder> CreateEncoder() override {
+    cricket::VideoCodec profile2_codec;
+    profile2_codec.SetParam(kVP9FmtpProfileId,
+                            VP9ProfileToString(VP9Profile::kProfile2));
+    return VP9Encoder::Create(profile2_codec);
+  }
+
+  std::unique_ptr<VideoDecoder> CreateDecoder() override {
+    return VP9Decoder::Create();
+  }
+};
+
+// Disabled until https://bugs.chromium.org/p/webm/issues/detail?id=1544 is
+// solved.
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_EncodeDecode DISABLED_EncodeDecode
+#else
+#define MAYBE_EncodeDecode EncodeDecode
+#endif
+TEST_F(TestVp9ImplProfile2, MAYBE_EncodeDecode) {
+  VideoFrame* input_frame = NextInputFrame();
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*input_frame, nullptr, nullptr));
+  EncodedImage encoded_frame;
+  CodecSpecificInfo codec_specific_info;
+  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
+  // First frame should be a key frame.
+  encoded_frame._frameType = kVideoFrameKey;
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            decoder_->Decode(encoded_frame, false, nullptr, 0));
+  std::unique_ptr<VideoFrame> decoded_frame;
+  absl::optional<uint8_t> decoded_qp;
+  ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
+  ASSERT_TRUE(decoded_frame);
+
+  // TODO(emircan): Add PSNR for different color depths.
+  EXPECT_GT(I420PSNR(*input_frame->video_frame_buffer()->ToI420(),
+                     *decoded_frame->video_frame_buffer()->ToI420()),
+            31);
+}
+
 }  // namespace webrtc
@@ -21,6 +21,7 @@
 #include "vpx/vpx_encoder.h"

 #include "absl/memory/memory.h"
+#include "api/video/i010_buffer.h"
 #include "common_video/include/video_frame_buffer.h"
 #include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -97,7 +98,6 @@ VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec)
       is_flexible_mode_(false) {
   memset(&codec_, 0, sizeof(codec_));
   memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t));
-  RTC_DCHECK(VP9Profile::kProfile0 == profile_);
 }

 VP9EncoderImpl::~VP9EncoderImpl() {
@@ -313,15 +313,37 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
       CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
   encoded_image_._buffer = new uint8_t[encoded_image_._size];
   encoded_image_._completeFrame = true;
-  // Creating a wrapper to the image - setting image data to nullptr. Actual
-  // pointer will be set in encode. Setting align to 1, as it is meaningless
-  // (actual memory is not allocated).
-  raw_ = vpx_img_wrap(nullptr, VPX_IMG_FMT_I420, codec_.width, codec_.height, 1,
-                      nullptr);
   // Populate encoder configuration with default values.
   if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) {
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
+
+  vpx_img_fmt img_fmt = VPX_IMG_FMT_NONE;
+  unsigned int bits_for_storage = 8;
+  switch (profile_) {
+    case VP9Profile::kProfile0:
+      img_fmt = VPX_IMG_FMT_I420;
+      bits_for_storage = 8;
+      config_->g_bit_depth = VPX_BITS_8;
+      config_->g_profile = 0;
+      config_->g_input_bit_depth = 8;
+      break;
+    case VP9Profile::kProfile2:
+      img_fmt = VPX_IMG_FMT_I42016;
+      bits_for_storage = 16;
+      config_->g_bit_depth = VPX_BITS_10;
+      config_->g_profile = 2;
+      config_->g_input_bit_depth = 10;
+      break;
+  }
+
+  // Creating a wrapper to the image - setting image data to nullptr. Actual
+  // pointer will be set in encode. Setting align to 1, as it is meaningless
+  // (actual memory is not allocated).
+  raw_ =
+      vpx_img_wrap(nullptr, img_fmt, codec_.width, codec_.height, 1, nullptr);
+  raw_->bit_depth = bits_for_storage;
+
   config_->g_w = codec_.width;
   config_->g_h = codec_.height;
   config_->rc_target_bitrate = inst->startBitrate;  // in kbit/s
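As a reading aid, the profile-dependent values chosen in the switch above can be collected in one place. This is a sketch rather than code from the change; the struct and helper are hypothetical, while the vpx constants are the ones used above.

// Sketch: the knobs InitEncode() sets per profile (see the switch above).
#include "media/base/vp9_profile.h"
#include "vpx/vpx_image.h"

struct ProfileSettings {
  vpx_img_fmt raw_format;      // Layout of the vpx_image_t wrapper (raw_).
  unsigned int vpx_bit_depth;  // Value given to g_bit_depth / g_input_bit_depth.
  unsigned int vpx_profile;    // Value given to config_->g_profile.
  unsigned int storage_bits;   // raw_->bit_depth: bits per stored sample.
};

ProfileSettings SettingsFor(webrtc::VP9Profile profile) {
  switch (profile) {
    case webrtc::VP9Profile::kProfile2:
      // 10-bit 4:2:0; each sample occupies a 16-bit word in memory.
      return {VPX_IMG_FMT_I42016, 10, 2, 16};
    case webrtc::VP9Profile::kProfile0:
    default:
      // 8-bit 4:2:0.
      return {VPX_IMG_FMT_I420, 8, 0, 8};
  }
}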
@@ -478,7 +500,11 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }

-  if (vpx_codec_enc_init(encoder_, vpx_codec_vp9_cx(), config_, 0)) {
+  const vpx_codec_err_t rv = vpx_codec_enc_init(
+      encoder_, vpx_codec_vp9_cx(), config_,
+      config_->g_bit_depth == VPX_BITS_8 ? 0 : VPX_CODEC_USE_HIGHBITDEPTH);
+  if (rv != VPX_CODEC_OK) {
+    RTC_LOG(LS_ERROR) << "Init error: " << vpx_codec_err_to_string(rv);
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
   vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_);
@@ -603,16 +629,47 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
   // doing this.
   input_image_ = &input_image;

-  rtc::scoped_refptr<I420BufferInterface> i420_buffer =
-      input_image.video_frame_buffer()->ToI420();
-  // Image in vpx_image_t format.
-  // Input image is const. VPX's raw image is not defined as const.
-  raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(i420_buffer->DataY());
-  raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(i420_buffer->DataU());
-  raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(i420_buffer->DataV());
-  raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY();
-  raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU();
-  raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();
+  // Keep reference to buffer until encode completes.
+  rtc::scoped_refptr<I420BufferInterface> i420_buffer;
+  rtc::scoped_refptr<I010BufferInterface> i010_buffer;
+  switch (profile_) {
+    case VP9Profile::kProfile0: {
+      i420_buffer = input_image.video_frame_buffer()->ToI420();
+      // Image in vpx_image_t format.
+      // Input image is const. VPX's raw image is not defined as const.
+      raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(i420_buffer->DataY());
+      raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(i420_buffer->DataU());
+      raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(i420_buffer->DataV());
+      raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY();
+      raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU();
+      raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();
+      break;
+    }
+    case VP9Profile::kProfile2: {
+      // We can inject kI010 frames directly for encode. All other formats
+      // should be converted to it.
+      switch (input_image.video_frame_buffer()->type()) {
+        case VideoFrameBuffer::Type::kI010: {
+          i010_buffer = input_image.video_frame_buffer()->GetI010();
+          break;
+        }
+        default: {
+          i010_buffer =
+              I010Buffer::Copy(*input_image.video_frame_buffer()->ToI420());
+        }
+      }
+      raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(
+          reinterpret_cast<const uint8_t*>(i010_buffer->DataY()));
+      raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(
+          reinterpret_cast<const uint8_t*>(i010_buffer->DataU()));
+      raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(
+          reinterpret_cast<const uint8_t*>(i010_buffer->DataV()));
+      raw_->stride[VPX_PLANE_Y] = i010_buffer->StrideY() * 2;
+      raw_->stride[VPX_PLANE_U] = i010_buffer->StrideU() * 2;
+      raw_->stride[VPX_PLANE_V] = i010_buffer->StrideV() * 2;
+      break;
+    }
+  }

   vpx_enc_frame_flags_t flags = 0;
   if (force_key_frame_) {
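Note that the strides handed to libvpx are doubled in the Profile 2 branch because I010 strides count 16-bit samples, while vpx_image_t strides are in bytes. A caller-side sketch of producing a 10-bit frame for this path; the helper name is hypothetical, while I010Buffer::Copy and the VideoFrame constructor follow the usage elsewhere in this change.

// Sketch: wrap any frame buffer as a 10-bit I010 frame for a Profile 2 encode.
#include "api/video/i010_buffer.h"
#include "api/video/video_frame.h"
#include "api/video/video_frame_buffer.h"

webrtc::VideoFrame MakeI010Frame(
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> source) {
  // Native kI010 buffers are taken as-is by Encode(); anything else is
  // promoted here the same way the encoder would (ToI420 + I010Buffer::Copy).
  rtc::scoped_refptr<webrtc::I010Buffer> i010 =
      webrtc::I010Buffer::Copy(*source->ToI420());
  return webrtc::VideoFrame(i010, webrtc::kVideoRotation_0,
                            0 /* timestamp_us */);
}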
@@ -622,8 +679,13 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
   RTC_CHECK_GT(codec_.maxFramerate, 0);
   uint32_t duration =
       90000 / target_framerate_fps_.value_or(codec_.maxFramerate);
-  if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags,
-                       VPX_DL_REALTIME)) {
+  const vpx_codec_err_t rv = vpx_codec_encode(encoder_, raw_, timestamp_,
+                                              duration, flags, VPX_DL_REALTIME);
+  if (rv != VPX_CODEC_OK) {
+    RTC_LOG(LS_ERROR) << "Encoding error: " << vpx_codec_err_to_string(rv)
+                      << "\n"
+                      << "Details: " << vpx_codec_error(encoder_) << "\n"
+                      << vpx_codec_error_detail(encoder_);
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
   timestamp_ += duration;
@@ -1084,10 +1146,13 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
   // vpx_codec_decode calls or vpx_codec_destroy).
   Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer =
       static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv);
+
   // The buffer can be used directly by the VideoFrame (without copy) by
-  // using a WrappedI420Buffer.
-  rtc::scoped_refptr<WrappedI420Buffer> img_wrapped_buffer(
-      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+  // using a Wrapped*Buffer.
+  rtc::scoped_refptr<VideoFrameBuffer> img_wrapped_buffer;
+  switch (img->bit_depth) {
+    case 8:
+      img_wrapped_buffer = WrapI420Buffer(
           img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
           img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
           img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
@@ -1095,8 +1160,22 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
           // WrappedI420Buffer's mechanism for allowing the release of its frame
           // buffer is through a callback function. This is where we should
           // release |img_buffer|.
-          rtc::KeepRefUntilDone(img_buffer)));
+          rtc::KeepRefUntilDone(img_buffer));
+      break;
+    case 10:
+      img_wrapped_buffer = WrapI010Buffer(
+          img->d_w, img->d_h,
+          reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_Y]),
+          img->stride[VPX_PLANE_Y] / 2,
+          reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_U]),
+          img->stride[VPX_PLANE_U] / 2,
+          reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_V]),
+          img->stride[VPX_PLANE_V] / 2, rtc::KeepRefUntilDone(img_buffer));
+      break;
+    default:
+      RTC_NOTREACHED();
+      return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
+  }

   VideoFrame decoded_image(img_wrapped_buffer, timestamp,
                            0 /* render_time_ms */, webrtc::kVideoRotation_0);
   decoded_image.set_ntp_time_ms(ntp_time_ms);
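On the receive side, a frame sink may now see either wrapped buffer type. A hedged sketch of handling both, using the GetI010()/ToI420() accessors shown in this change, with logging standing in for real rendering:

// Sketch: inspect a decoded frame that may carry an 8-bit or a 10-bit buffer.
#include "api/video/video_frame.h"
#include "api/video/video_frame_buffer.h"
#include "rtc_base/logging.h"

void InspectDecodedFrame(const webrtc::VideoFrame& frame) {
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      frame.video_frame_buffer();
  if (buffer->type() == webrtc::VideoFrameBuffer::Type::kI010) {
    // 10-bit path: samples are stored as uint16_t with 10 significant bits.
    const webrtc::I010BufferInterface* i010 = buffer->GetI010();
    RTC_LOG(LS_INFO) << "Decoded I010 frame " << i010->width() << "x"
                     << i010->height();
  } else {
    // Any other buffer type can still be consumed as 8-bit I420.
    rtc::scoped_refptr<webrtc::I420BufferInterface> i420 = buffer->ToI420();
    RTC_LOG(LS_INFO) << "Decoded I420 frame " << i420->width() << "x"
                     << i420->height();
  }
}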
@@ -64,6 +64,7 @@ rtc_source_set("video_test_common") {
     "../:typedefs",
     "../api:libjingle_peerconnection_api",
     "../api/video:video_frame",
+    "../api/video:video_frame_i010",
     "../api/video:video_frame_i420",
     "../api/video_codecs:video_codecs_api",
     "../call:video_stream_api",
@@ -15,6 +15,7 @@

 #include <memory>

+#include "api/video/i010_buffer.h"
 #include "api/video/i420_buffer.h"
 #include "common_video/include/video_frame_buffer.h"
 #include "common_video/libyuv/include/webrtc_libyuv.h"
@@ -68,7 +69,8 @@ class SquareGenerator : public FrameGenerator {

     rtc::scoped_refptr<VideoFrameBuffer> buffer = nullptr;
     switch (type_) {
-      case OutputType::I420: {
+      case OutputType::I420:
+      case OutputType::I010: {
         buffer = CreateI420Buffer(width_, height_);
         break;
       }
@@ -90,6 +92,10 @@ class SquareGenerator : public FrameGenerator {
     for (const auto& square : squares_)
       square->Draw(buffer);

+    if (type_ == OutputType::I010) {
+      buffer = I010Buffer::Copy(*buffer->ToI420());
+    }
+
     frame_.reset(
         new VideoFrame(buffer, webrtc::kVideoRotation_0, 0 /* timestamp_us */));
     return frame_.get();
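A hedged sketch of using the new I010 output type to produce a synthetic 10-bit frame. CreateSquareGenerator's arguments follow the test code earlier in this change; NextFrame() is assumed to be the generator's frame accessor, and the helper name is hypothetical.

// Sketch: generate one synthetic 10-bit frame with the square generator.
#include <memory>

#include "absl/types/optional.h"
#include "api/video/video_frame.h"
#include "api/video/video_frame_buffer.h"
#include "rtc_base/checks.h"
#include "test/frame_generator.h"

void GenerateOneI010Frame() {
  std::unique_ptr<webrtc::test::FrameGenerator> generator =
      webrtc::test::FrameGenerator::CreateSquareGenerator(
          352, 288, webrtc::test::FrameGenerator::OutputType::I010,
          absl::optional<int>() /* default number of squares */);
  // The generator draws into an I420 buffer and then copies it to I010
  // (see the code above), so the produced frame carries a kI010 buffer.
  webrtc::VideoFrame* frame = generator->NextFrame();
  RTC_DCHECK(frame->video_frame_buffer()->type() ==
             webrtc::VideoFrameBuffer::Type::kI010);
}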
@@ -58,7 +58,7 @@ class FrameGenerator {
     RTC_NOTREACHED();
   }

-  enum class OutputType { I420, I420A };
+  enum class OutputType { I420, I420A, I010 };

   // Creates a frame generator that produces frames with small squares that
   // move randomly towards the lower right corner.
@@ -203,6 +203,7 @@ if (rtc_include_tests) {
     ]
     deps = [
       ":video_quality_test",
+      "../media:rtc_vp9_profile",
      "../modules/pacing:pacing",
       "../rtc_base:rtc_base_approved",
       "../rtc_base/experiments:alr_experiment",
@@ -9,6 +9,7 @@
  */
 #include <stdio.h>

+#include "media/base/vp9_profile.h"
 #include "rtc_base/experiments/alr_experiment.h"
 #include "rtc_base/flags.h"
 #include "test/field_trial.h"
@@ -111,6 +112,30 @@ TEST(FullStackTest, ForemanCifPlr5Vp9) {
   fixture->RunWithAnalyzer(foreman_cif);
 }

+// Disabled until https://bugs.chromium.org/p/webm/issues/detail?id=1544 is
+// solved.
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_GeneratorWithoutPacketLossVp9Profile2 \
+  DISABLED_GeneratorWithoutPacketLossVp9Profile2
+#else
+#define MAYBE_GeneratorWithoutPacketLossVp9Profile2 \
+  GeneratorWithoutPacketLossVp9Profile2
+#endif
+TEST(FullStackTest, MAYBE_GeneratorWithoutPacketLossVp9Profile2) {
+  auto fixture = CreateVideoQualityTestFixture();
+
+  SdpVideoFormat::Parameters vp92 = {
+      {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}};
+  ParamsWithLogging generator;
+  generator.call.send_side_bwe = true;
+  generator.video[0] = {
+      true, 352, 288, 30, 700000, 700000, 700000, false, "VP9",
+      1, 0, 0, false, false, false, "GeneratorI010", 0, vp92};
+  generator.analyzer = {"generator_net_delay_0_0_plr_0_VP9Profile2", 0.0, 0.0,
+                        kFullStackTestDurationSecs};
+  fixture->RunWithAnalyzer(generator);
+}
+
 TEST(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420Frame) {
   auto fixture = CreateVideoQualityTestFixture();
   ParamsWithLogging foreman_cif;
@@ -92,9 +92,8 @@ VideoQualityTest::TestVideoEncoderFactory::CreateVideoEncoder(
   } else if (format.name == "multiplex") {
     return absl::make_unique<MultiplexEncoderAdapter>(
         &internal_encoder_factory_, SdpVideoFormat(cricket::kVp9CodecName));
-  } else {
-    return internal_encoder_factory_.CreateVideoEncoder(format);
   }
+  return internal_encoder_factory_.CreateVideoEncoder(format);
 }

 VideoQualityTest::VideoQualityTest(
@@ -391,7 +390,6 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
   RTC_CHECK_GT(num_video_substreams, 0);
   CreateVideoSendConfig(&video_send_configs_[video_idx], num_video_substreams,
                         total_streams_used, send_transport);
-
   int payload_type;
   if (params_.video[video_idx].codec == "H264") {
     payload_type = kPayloadTypeH264;
@@ -435,6 +433,9 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
     video_encoder_configs_[video_idx].video_format.name =
         params_.video[video_idx].codec;

+    video_encoder_configs_[video_idx].video_format.parameters =
+        params_.video[video_idx].sdp_params;
+
     video_encoder_configs_[video_idx].codec_type =
         PayloadStringToCodecType(params_.video[video_idx].codec);

@@ -814,6 +815,12 @@ void VideoQualityTest::CreateCapturers() {
           static_cast<int>(params_.video[video_idx].height),
           test::FrameGenerator::OutputType::I420A, absl::nullopt,
           params_.video[video_idx].fps, clock_));
+    } else if (params_.video[video_idx].clip_name == "GeneratorI010") {
+      video_capturers_[video_idx].reset(test::FrameGeneratorCapturer::Create(
+          static_cast<int>(params_.video[video_idx].width),
+          static_cast<int>(params_.video[video_idx].height),
+          test::FrameGenerator::OutputType::I010, absl::nullopt,
+          params_.video[video_idx].fps, clock_));
     } else if (params_.video[video_idx].clip_name.empty()) {
       video_capturers_[video_idx].reset(test::VcmCapturer::Create(
           params_.video[video_idx].width, params_.video[video_idx].height,