Use frame generator in video codec unit tests.
There is no need to use real video as input for the encoder in unit tests. Using a generator simplifies testing on mobile devices (no need to upload files to the device).

Bug: none
Change-Id: Ic48609cc6f8eecf90d9956edfdd33135be949398
Reviewed-on: https://webrtc-review.googlesource.com/64526
Commit-Queue: Sergey Silkin <ssilkin@webrtc.org>
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22648}
Committed by: Commit Bot
Parent: 8ac9bb4d52
Commit: 1d2b627438
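For orientation, the pattern this change introduces can be condensed as in the sketch below: instead of reading paris_qcif.yuv from disk, the shared test fixture builds a square-pattern frame generator sized from the codec settings and hands out frames with advancing RTP timestamps via NextInputFrame(). This is an illustrative condensation of the diff that follows, not code from the CL; the SyntheticFrameSource class and kRtpClockRateHz constant are made up for the example (the real fixture uses kVideoPayloadTypeFrequency), and include paths other than those visible in the diff are assumed.

// Hypothetical sketch, not part of this CL.
#include <memory>

#include "api/optional.h"
#include "api/video/video_frame.h"  // Assumed path for webrtc::VideoFrame.
#include "test/frame_generator.h"

namespace webrtc {
namespace {

constexpr int kWidth = 176;   // QCIF width, matching the updated constant in the diff.
constexpr int kHeight = 144;  // QCIF height.
constexpr int kMaxFramerate = 30;
constexpr uint32_t kRtpClockRateHz = 90000;  // 90 kHz RTP video clock.

// Hands out synthetic frames the way VideoCodecUnitTest::NextInputFrame()
// does in this change, removing the need for a .yuv resource file on device.
class SyntheticFrameSource {
 public:
  SyntheticFrameSource()
      : generator_(test::FrameGenerator::CreateSquareGenerator(
            kWidth, kHeight,
            rtc::Optional<test::FrameGenerator::OutputType>(),
            rtc::Optional<int>())),
        last_timestamp_(0) {}

  // Returns the next generated frame with its RTP timestamp advanced by one
  // frame interval at kMaxFramerate.
  VideoFrame* NextInputFrame() {
    VideoFrame* frame = generator_->NextFrame();
    last_timestamp_ += kRtpClockRateHz / kMaxFramerate;
    frame->set_timestamp(last_timestamp_);
    return frame;
  }

 private:
  std::unique_ptr<test::FrameGenerator> generator_;
  uint32_t last_timestamp_;
};

}  // namespace
}  // namespace webrtc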
@@ -11,6 +11,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/codecs/h264/include/h264.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "test/video_codec_settings.h"

namespace webrtc {

@@ -24,13 +25,8 @@ class TestH264Impl : public VideoCodecUnitTest {
return H264Decoder::Create();
}

VideoCodec codec_settings() override {
VideoCodec codec_inst;
codec_inst.codecType = webrtc::kVideoCodecH264;
// If frame dropping is false, we get a warning that bitrate can't
// be controlled for RC_QUALITY_MODE; RC_BITRATE_MODE and RC_TIMESTAMP_MODE
codec_inst.H264()->frameDroppingOn = true;
return codec_inst;
void ModifyCodecSettings(VideoCodec* codec_settings) override {
webrtc::test::CodecSettings(kVideoCodecH264, codec_settings);
}
};

@@ -43,8 +39,9 @@ class TestH264Impl : public VideoCodecUnitTest {
#endif

TEST_F(TestH264Impl, MAYBE_EncodeDecode) {
VideoFrame* input_frame = NextInputFrame();
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*input_frame, nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
@@ -56,12 +53,12 @@ TEST_F(TestH264Impl, MAYBE_EncodeDecode) {
rtc::Optional<uint8_t> decoded_qp;
ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
ASSERT_TRUE(decoded_frame);
EXPECT_GT(I420PSNR(input_frame_.get(), decoded_frame.get()), 36);
EXPECT_GT(I420PSNR(input_frame, decoded_frame.get()), 36);
}

TEST_F(TestH264Impl, MAYBE_DecodedQpEqualsEncodedQp) {
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*NextInputFrame(), nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));

@@ -21,6 +21,7 @@
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/keep_ref_until_done.h"
#include "rtc_base/ptr_util.h"
#include "test/video_codec_settings.h"

using testing::_;
using testing::Return;
@@ -48,18 +49,17 @@ class TestMultiplexAdapter : public VideoCodecUnitTest {
encoder_factory_.get(), SdpVideoFormat(kMultiplexAssociatedCodecName));
}

VideoCodec codec_settings() override {
VideoCodec codec_settings;
codec_settings.codecType = kMultiplexAssociatedCodecType;
codec_settings.VP9()->numberOfTemporalLayers = 1;
codec_settings.VP9()->numberOfSpatialLayers = 1;
codec_settings.codecType = webrtc::kVideoCodecMultiplex;
return codec_settings;
void ModifyCodecSettings(VideoCodec* codec_settings) override {
webrtc::test::CodecSettings(kMultiplexAssociatedCodecType, codec_settings);
codec_settings->VP9()->numberOfTemporalLayers = 1;
codec_settings->VP9()->numberOfSpatialLayers = 1;
codec_settings->codecType = webrtc::kVideoCodecMultiplex;
}

std::unique_ptr<VideoFrame> CreateI420AInputFrame() {
VideoFrame* input_frame = NextInputFrame();
rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
input_frame_->video_frame_buffer()->ToI420();
input_frame->video_frame_buffer()->ToI420();
rtc::scoped_refptr<I420ABufferInterface> yuva_buffer = WrapI420ABuffer(
yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
@@ -120,8 +120,9 @@ TEST_F(TestMultiplexAdapter, ConstructAndDestructEncoder) {
}

TEST_F(TestMultiplexAdapter, EncodeDecodeI420Frame) {
VideoFrame* input_frame = NextInputFrame();
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*input_frame, nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
@@ -134,7 +135,7 @@ TEST_F(TestMultiplexAdapter, EncodeDecodeI420Frame) {
rtc::Optional<uint8_t> decoded_qp;
ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
ASSERT_TRUE(decoded_frame);
EXPECT_GT(I420PSNR(input_frame_.get(), decoded_frame.get()), 36);
EXPECT_GT(I420PSNR(input_frame, decoded_frame.get()), 36);
}

TEST_F(TestMultiplexAdapter, EncodeDecodeI420AFrame) {
@@ -162,8 +163,9 @@ TEST_F(TestMultiplexAdapter, EncodeDecodeI420AFrame) {
}

TEST_F(TestMultiplexAdapter, CheckSingleFrameEncodedBitstream) {
VideoFrame* input_frame = NextInputFrame();
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*input_frame, nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));

@@ -10,10 +10,9 @@

#include "modules/video_coding/codecs/test/video_codec_unittest.h"

#include "api/video/i420_buffer.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "test/frame_utils.h"
#include "test/testsupport/fileutils.h"
#include "test/video_codec_settings.h"

static const int kEncodeTimeoutMs = 100;
static const int kDecodeTimeoutMs = 25;
@@ -21,7 +20,7 @@ static const int kDecodeTimeoutMs = 25;
static const int kStartBitrate = 300;
static const int kTargetBitrate = 2000;
static const int kMaxBitrate = 4000;
static const int kWidth = 172; // Width of the input image.
static const int kWidth = 176; // Width of the input image.
static const int kHeight = 144; // Height of the input image.
static const int kMaxFramerate = 30; // Arbitrary value.

@@ -60,21 +59,44 @@ void VideoCodecUnitTest::FakeDecodeCompleteCallback::Decoded(
}

void VideoCodecUnitTest::SetUp() {
// Using a QCIF image. Processing only one frame.
FILE* source_file_ =
fopen(test::ResourcePath("paris_qcif", "yuv").c_str(), "rb");
ASSERT_TRUE(source_file_ != NULL);
rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer(
test::ReadI420Buffer(kWidth, kHeight, source_file_));
input_frame_.reset(new VideoFrame(video_frame_buffer, kVideoRotation_0, 0));
fclose(source_file_);
webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings_);
codec_settings_.startBitrate = kStartBitrate;
codec_settings_.targetBitrate = kTargetBitrate;
codec_settings_.maxBitrate = kMaxBitrate;
codec_settings_.maxFramerate = kMaxFramerate;
codec_settings_.width = kWidth;
codec_settings_.height = kHeight;

ModifyCodecSettings(&codec_settings_);

input_frame_generator_ = test::FrameGenerator::CreateSquareGenerator(
codec_settings_.width, codec_settings_.height,
rtc::Optional<test::FrameGenerator::OutputType>(), rtc::Optional<int>());

encoder_ = CreateEncoder();
decoder_ = CreateDecoder();
encoder_->RegisterEncodeCompleteCallback(&encode_complete_callback_);
decoder_->RegisterDecodeCompleteCallback(&decode_complete_callback_);

InitCodecs();
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->InitDecode(&codec_settings_, 1 /* number of cores */));
}

void VideoCodecUnitTest::ModifyCodecSettings(VideoCodec* codec_settings) {}

VideoFrame* VideoCodecUnitTest::NextInputFrame() {
VideoFrame* input_frame = input_frame_generator_->NextFrame();

const uint32_t timestamp =
last_input_frame_timestamp_ +
kVideoPayloadTypeFrequency / codec_settings_.maxFramerate;
input_frame->set_timestamp(timestamp);

last_input_frame_timestamp_ = timestamp;
return input_frame;
}

bool VideoCodecUnitTest::WaitForEncodedFrame(
@@ -135,19 +157,4 @@ bool VideoCodecUnitTest::WaitForDecodedFrame(std::unique_ptr<VideoFrame>* frame,
}
}

void VideoCodecUnitTest::InitCodecs() {
codec_settings_ = codec_settings();
codec_settings_.startBitrate = kStartBitrate;
codec_settings_.targetBitrate = kTargetBitrate;
codec_settings_.maxBitrate = kMaxBitrate;
codec_settings_.maxFramerate = kMaxFramerate;
codec_settings_.width = kWidth;
codec_settings_.height = kHeight;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->InitDecode(&codec_settings_, 1 /* number of cores */));
}

} // namespace webrtc

@@ -22,6 +22,7 @@
#include "rtc_base/criticalsection.h"
#include "rtc_base/event.h"
#include "rtc_base/thread_annotations.h"
#include "test/frame_generator.h"
#include "test/gtest.h"

namespace webrtc {
@@ -35,7 +36,8 @@ class VideoCodecUnitTest : public ::testing::Test {
false /* initially signaled */),
wait_for_encoded_frames_threshold_(1),
decoded_frame_event_(false /* manual reset */,
false /* initially signaled */) {}
false /* initially signaled */),
last_input_frame_timestamp_(0) {}

protected:
class FakeEncodeCompleteCallback : public webrtc::EncodedImageCallback {
@@ -74,10 +76,13 @@ class VideoCodecUnitTest : public ::testing::Test {

virtual std::unique_ptr<VideoEncoder> CreateEncoder() = 0;
virtual std::unique_ptr<VideoDecoder> CreateDecoder() = 0;
virtual VideoCodec codec_settings() = 0;

void SetUp() override;

virtual void ModifyCodecSettings(VideoCodec* codec_settings);

VideoFrame* NextInputFrame();

// Helper method for waiting a single encoded frame.
bool WaitForEncodedFrame(EncodedImage* frame,
CodecSpecificInfo* codec_specific_info);
@@ -94,17 +99,12 @@ class VideoCodecUnitTest : public ::testing::Test {
bool WaitForDecodedFrame(std::unique_ptr<VideoFrame>* frame,
rtc::Optional<uint8_t>* qp);

// Populated by InitCodecs().
VideoCodec codec_settings_;

std::unique_ptr<VideoFrame> input_frame_;

std::unique_ptr<VideoEncoder> encoder_;
std::unique_ptr<VideoDecoder> decoder_;

private:
void InitCodecs();

FakeEncodeCompleteCallback encode_complete_callback_;
FakeDecodeCompleteCallback decode_complete_callback_;

@@ -121,6 +121,9 @@ class VideoCodecUnitTest : public ::testing::Test {
rtc::Optional<VideoFrame> decoded_frame_
RTC_GUARDED_BY(decoded_frame_section_);
rtc::Optional<uint8_t> decoded_qp_ RTC_GUARDED_BY(decoded_frame_section_);

std::unique_ptr<test::FrameGenerator> input_frame_generator_;
uint32_t last_input_frame_timestamp_;
};

} // namespace webrtc

@@ -12,16 +12,12 @@

#include <memory>

#include "api/optional.h"
#include "api/video/i420_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp8/temporal_layers.h"
#include "modules/video_coding/utility/vp8_header_parser.h"
#include "test/frame_utils.h"
#include "test/gtest.h"
#include "test/testsupport/fileutils.h"
#include "rtc_base/timeutils.h"
#include "test/video_codec_settings.h"

namespace webrtc {
@@ -30,17 +26,11 @@ namespace {
constexpr uint32_t kInitialTimestampRtp = 123;
constexpr int64_t kTestNtpTimeMs = 456;
constexpr int64_t kInitialTimestampMs = 789;
constexpr uint32_t kTimestampIncrement = 3000;
constexpr int kNumCores = 1;
constexpr size_t kMaxPayloadSize = 1440;
constexpr int kDefaultMinPixelsPerFrame = 320 * 180;
constexpr int kWidth = 172;
constexpr int kHeight = 144;

void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) {
*stride_y = 16 * ((width + 15) / 16);
*stride_uv = 16 * ((width + 31) / 32);
}
} // namespace

class TestVp8Impl : public VideoCodecUnitTest {
@@ -53,46 +43,21 @@ class TestVp8Impl : public VideoCodecUnitTest {
return VP8Decoder::Create();
}

VideoCodec codec_settings() override {
VideoCodec codec_settings;
webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings);
codec_settings.VP8()->denoisingOn = true;
codec_settings.VP8()->frameDroppingOn = false;
codec_settings.VP8()->automaticResizeOn = false;
codec_settings.VP8()->complexity = kComplexityNormal;
return codec_settings;
void ModifyCodecSettings(VideoCodec* codec_settings) override {
webrtc::test::CodecSettings(kVideoCodecVP8, codec_settings);
codec_settings->width = kWidth;
codec_settings->height = kHeight;
codec_settings->VP8()->denoisingOn = true;
codec_settings->VP8()->frameDroppingOn = false;
codec_settings->VP8()->automaticResizeOn = false;
codec_settings->VP8()->complexity = kComplexityNormal;
}

void SetupInputFrame() {
// Using a QCIF image (aligned stride (u,v planes) > width).
// Processing only one frame.
FILE* file = fopen(test::ResourcePath("paris_qcif", "yuv").c_str(), "rb");
ASSERT_TRUE(file != nullptr);
rtc::scoped_refptr<I420BufferInterface> compact_buffer(
test::ReadI420Buffer(kWidth, kHeight, file));
ASSERT_TRUE(compact_buffer);

// Setting aligned stride values.
int stride_uv;
int stride_y;
Calc16ByteAlignedStride(kWidth, &stride_y, &stride_uv);
EXPECT_EQ(stride_y, 176);
EXPECT_EQ(stride_uv, 96);
rtc::scoped_refptr<I420Buffer> stride_buffer(
I420Buffer::Create(kWidth, kHeight, stride_y, stride_uv, stride_uv));

// No scaling in our case, just a copy, to add stride to the image.
stride_buffer->ScaleFrom(*compact_buffer);

input_frame_.reset(new VideoFrame(stride_buffer, kInitialTimestampRtp,
kInitialTimestampMs, kVideoRotation_0));
fclose(file);
}

void EncodeAndWaitForFrame(EncodedImage* encoded_frame,
void EncodeAndWaitForFrame(const VideoFrame& input_frame,
EncodedImage* encoded_frame,
CodecSpecificInfo* codec_specific_info) {
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(input_frame, nullptr, nullptr));
ASSERT_TRUE(WaitForEncodedFrame(encoded_frame, codec_specific_info));
VerifyQpParser(*encoded_frame);
EXPECT_STREQ("libvpx", codec_specific_info->codec_name);
@@ -100,10 +65,11 @@ class TestVp8Impl : public VideoCodecUnitTest {
EXPECT_EQ(0u, codec_specific_info->codecSpecific.VP8.simulcastIdx);
}

void EncodeAndExpectFrameWith(uint8_t temporal_idx) {
void EncodeAndExpectFrameWith(const VideoFrame& input_frame,
uint8_t temporal_idx) {
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
EncodeAndWaitForFrame(&encoded_frame, &codec_specific_info);
EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);
EXPECT_EQ(temporal_idx, codec_specific_info.codecSpecific.VP8.temporalIdx);
}

@@ -138,11 +104,12 @@ TEST_F(TestVp8Impl, EncodeFrameAndRelease) {

EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
EncodeAndWaitForFrame(&encoded_frame, &codec_specific_info);
EncodeAndWaitForFrame(*NextInputFrame(), &encoded_frame,
&codec_specific_info);

EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*NextInputFrame(), nullptr, nullptr));
}

TEST_F(TestVp8Impl, InitDecode) {
@@ -152,10 +119,13 @@ TEST_F(TestVp8Impl, InitDecode) {
}

TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
SetupInputFrame();
VideoFrame* input_frame = NextInputFrame();
input_frame->set_timestamp(kInitialTimestampRtp);
input_frame->set_timestamp_us(kInitialTimestampMs *
rtc::kNumMicrosecsPerMillisec);
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
EncodeAndWaitForFrame(&encoded_frame, &codec_specific_info);
EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);

EXPECT_EQ(kInitialTimestampRtp, encoded_frame._timeStamp);
EXPECT_EQ(kInitialTimestampMs, encoded_frame.capture_time_ms_);
@@ -168,22 +138,24 @@ TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
// TODO(brandtr): Consider passing through the rotation flag through the decoder
// in the same way as done in the encoder.
TEST_F(TestVp8Impl, EncodedRotationEqualsInputRotation) {
input_frame_->set_rotation(kVideoRotation_0);
VideoFrame* input_frame = NextInputFrame();
input_frame->set_rotation(kVideoRotation_0);

EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
EncodeAndWaitForFrame(&encoded_frame, &codec_specific_info);
EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);
EXPECT_EQ(kVideoRotation_0, encoded_frame.rotation_);

input_frame_->set_rotation(kVideoRotation_90);
EncodeAndWaitForFrame(&encoded_frame, &codec_specific_info);
input_frame->set_rotation(kVideoRotation_90);
EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);
EXPECT_EQ(kVideoRotation_90, encoded_frame.rotation_);
}

TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) {
VideoFrame* input_frame = NextInputFrame();
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
EncodeAndWaitForFrame(&encoded_frame, &codec_specific_info);
EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);

// First frame should be a key frame.
encoded_frame._frameType = kVideoFrameKey;
@@ -194,7 +166,7 @@ TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) {
ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
ASSERT_TRUE(decoded_frame);
ASSERT_TRUE(decoded_qp);
EXPECT_GT(I420PSNR(input_frame_.get(), decoded_frame.get()), 36);
EXPECT_GT(I420PSNR(input_frame, decoded_frame.get()), 36);
EXPECT_EQ(encoded_frame.qp_, *decoded_qp);
}

@@ -262,10 +234,13 @@ TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
#define MAYBE_AlignedStrideEncodeDecode AlignedStrideEncodeDecode
#endif
TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
SetupInputFrame();
VideoFrame* input_frame = NextInputFrame();
input_frame->set_timestamp(kInitialTimestampRtp);
input_frame->set_timestamp_us(kInitialTimestampMs *
rtc::kNumMicrosecsPerMillisec);
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
EncodeAndWaitForFrame(&encoded_frame, &codec_specific_info);
EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);

// First frame should be a key frame.
encoded_frame._frameType = kVideoFrameKey;
@@ -278,7 +253,7 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
ASSERT_TRUE(decoded_frame);
// Compute PSNR on all planes (faster than SSIM).
EXPECT_GT(I420PSNR(input_frame_.get(), decoded_frame.get()), 36);
EXPECT_GT(I420PSNR(input_frame, decoded_frame.get()), 36);
EXPECT_EQ(kInitialTimestampRtp, decoded_frame->timestamp());
EXPECT_EQ(kTestNtpTimeMs, decoded_frame->ntp_time_ms());
}
@@ -289,9 +264,10 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
#define MAYBE_DecodeWithACompleteKeyFrame DecodeWithACompleteKeyFrame
#endif
TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
VideoFrame* input_frame = NextInputFrame();
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
EncodeAndWaitForFrame(&encoded_frame, &codec_specific_info);
EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);

// Setting complete to false -> should return an error.
encoded_frame._completeFrame = false;
@@ -310,7 +286,7 @@ TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
rtc::Optional<uint8_t> decoded_qp;
ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
ASSERT_TRUE(decoded_frame);
EXPECT_GT(I420PSNR(input_frame_.get(), decoded_frame.get()), 36);
EXPECT_GT(I420PSNR(input_frame, decoded_frame.get()), 36);
}

TEST_F(TestVp8Impl, EncoderWith2TemporalLayers) {
@@ -321,19 +297,16 @@ TEST_F(TestVp8Impl, EncoderWith2TemporalLayers) {
// Temporal layer 0.
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
EncodeAndWaitForFrame(&encoded_frame, &codec_specific_info);
EncodeAndWaitForFrame(*NextInputFrame(), &encoded_frame,
&codec_specific_info);

EXPECT_EQ(0, codec_specific_info.codecSpecific.VP8.temporalIdx);
// Temporal layer 1.
input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);

EncodeAndExpectFrameWith(1);
EncodeAndExpectFrameWith(*NextInputFrame(), 1);
// Temporal layer 0.
input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
EncodeAndExpectFrameWith(0);
EncodeAndExpectFrameWith(*NextInputFrame(), 0);
// Temporal layer 1.
input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
EncodeAndExpectFrameWith(1);
EncodeAndExpectFrameWith(*NextInputFrame(), 1);
}

TEST_F(TestVp8Impl, ScalingDisabledIfAutomaticResizeOff) {

@@ -11,11 +11,13 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "test/video_codec_settings.h"

namespace webrtc {

namespace {
constexpr uint32_t kTimestampIncrementPerFrame = 3000;
const size_t kWidth = 1280;
const size_t kHeight = 720;
} // namespace

class TestVp9Impl : public VideoCodecUnitTest {
@@ -28,12 +30,12 @@ class TestVp9Impl : public VideoCodecUnitTest {
return VP9Decoder::Create();
}

VideoCodec codec_settings() override {
VideoCodec codec_settings;
codec_settings.codecType = webrtc::kVideoCodecVP9;
codec_settings.VP9()->numberOfTemporalLayers = 1;
codec_settings.VP9()->numberOfSpatialLayers = 1;
return codec_settings;
void ModifyCodecSettings(VideoCodec* codec_settings) override {
webrtc::test::CodecSettings(kVideoCodecVP9, codec_settings);
codec_settings->width = kWidth;
codec_settings->height = kHeight;
codec_settings->VP9()->numberOfTemporalLayers = 1;
codec_settings->VP9()->numberOfSpatialLayers = 1;
}

void ExpectFrameWith(uint8_t temporal_idx) {
@@ -50,8 +52,9 @@ TEST_F(TestVp9Impl, DISABLED_EncodeDecode) {
#else
TEST_F(TestVp9Impl, EncodeDecode) {
#endif
VideoFrame* input_frame = NextInputFrame();
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*input_frame, nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
@@ -63,7 +66,7 @@ TEST_F(TestVp9Impl, EncodeDecode) {
rtc::Optional<uint8_t> decoded_qp;
ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
ASSERT_TRUE(decoded_frame);
EXPECT_GT(I420PSNR(input_frame_.get(), decoded_frame.get()), 36);
EXPECT_GT(I420PSNR(input_frame, decoded_frame.get()), 36);
}

// We only test the encoder here, since the decoded frame rotation is set based
@@ -71,24 +74,25 @@ TEST_F(TestVp9Impl, EncodeDecode) {
// TODO(brandtr): Consider passing through the rotation flag through the decoder
// in the same way as done in the encoder.
TEST_F(TestVp9Impl, EncodedRotationEqualsInputRotation) {
input_frame_->set_rotation(kVideoRotation_0);
VideoFrame* input_frame = NextInputFrame();
input_frame->set_rotation(kVideoRotation_0);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*input_frame, nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
EXPECT_EQ(kVideoRotation_0, encoded_frame.rotation_);

input_frame_->set_rotation(kVideoRotation_90);
input_frame->set_rotation(kVideoRotation_90);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*input_frame, nullptr, nullptr));
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
EXPECT_EQ(kVideoRotation_90, encoded_frame.rotation_);
}

TEST_F(TestVp9Impl, DecodedQpEqualsEncodedQp) {
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*NextInputFrame(), nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
@@ -106,7 +110,7 @@ TEST_F(TestVp9Impl, DecodedQpEqualsEncodedQp) {

TEST_F(TestVp9Impl, ParserQpEqualsEncodedQp) {
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*NextInputFrame(), nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
@@ -128,31 +132,25 @@ TEST_F(TestVp9Impl, EncoderWith2TemporalLayers) {

// Temporal layer 0.
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*NextInputFrame(), nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
EXPECT_EQ(0, codec_specific_info.codecSpecific.VP9.temporal_idx);

// Temporal layer 1.
input_frame_->set_timestamp(input_frame_->timestamp() +
kTimestampIncrementPerFrame);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*NextInputFrame(), nullptr, nullptr));
ExpectFrameWith(1);

// Temporal layer 0.
input_frame_->set_timestamp(input_frame_->timestamp() +
kTimestampIncrementPerFrame);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*NextInputFrame(), nullptr, nullptr));
ExpectFrameWith(0);

// Temporal layer 1.
input_frame_->set_timestamp(input_frame_->timestamp() +
kTimestampIncrementPerFrame);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
encoder_->Encode(*NextInputFrame(), nullptr, nullptr));
ExpectFrameWith(1);
}