Revert "Add unit tests covering MultiplexImageComponent"

This reverts commit 4dc891f5e3a4bcad4db31e1af0ad45b6c471eef2.

Reason for revert: Reverting this CL to make it possible to revert https://webrtc-review.googlesource.com/c/src/+/43242

Original change's description:
> Add unit tests covering MultiplexImageComponent
> 
> This CL changes some types in MultiplexImage and MultiplexImageComponent. It
> also adds unit test coverage in TestMultiplexAdapter for these structs.
> 
> Bug: webrtc:7671
> Change-Id: I832d0466dc67d3b6b7fa0d3fb76f02c0190e474f
> Reviewed-on: https://webrtc-review.googlesource.com/44081
> Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
> Reviewed-by: Qiang Chen <qiangchen@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#21770}

TBR=qiangchen@chromium.org,emircan@webrtc.org

Change-Id: I9cce6ed5f2990a2f443e04a9e5913cbd296242e4
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:7671
Reviewed-on: https://webrtc-review.googlesource.com/44341
Reviewed-by: Ivo Creusen <ivoc@webrtc.org>
Commit-Queue: Ivo Creusen <ivoc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21773}
commit 15eeef4189 (parent 665d18ea29)
Ivo Creusen <ivoc@webrtc.org>, committed by Commit Bot
2018-01-26 12:43:22 +00:00
3 changed files with 10 additions and 88 deletions

modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h

@@ -71,7 +71,7 @@ struct MultiplexImageComponent {

   // Identifies which component this frame represent, i.e. YUV frame vs Alpha
   // frame.
-  uint8_t component_index;
+  int component_index;

   // Stores the actual frame data of the encoded image.
   EncodedImage encoded_image;
@@ -79,11 +79,11 @@ struct MultiplexImageComponent {
 // Struct holding the whole frame bundle of components of an image.
 struct MultiplexImage {
-  uint16_t image_index;
-  uint8_t component_count;
+  int image_index;
+  int component_count;
   std::vector<MultiplexImageComponent> image_components;

-  MultiplexImage(uint16_t picture_index, uint8_t component_count);
+  MultiplexImage(int picture_index, int frame_count);
 };

 // A utility class providing conversion between two representations of a
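For readability, here is how the two structs read after this revert, reassembled from the context and changed lines in the hunks above (members the hunks do not show, such as the codec_type field referenced in the packer below, are omitted):

struct MultiplexImageComponent {
  // Identifies which component this frame represent, i.e. YUV frame vs Alpha
  // frame.
  int component_index;

  // Stores the actual frame data of the encoded image.
  EncodedImage encoded_image;
};

// Struct holding the whole frame bundle of components of an image.
struct MultiplexImage {
  int image_index;
  int component_count;
  std::vector<MultiplexImageComponent> image_components;

  MultiplexImage(int picture_index, int frame_count);
};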

modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc

@@ -113,7 +113,7 @@ void PackBitstream(uint8_t* buffer, MultiplexImageComponent image) {
   memcpy(buffer, image.encoded_image._buffer, image.encoded_image._length);
 }

-MultiplexImage::MultiplexImage(uint16_t picture_index, uint8_t frame_count)
+MultiplexImage::MultiplexImage(int picture_index, int frame_count)
     : image_index(picture_index), component_count(frame_count) {}

 EncodedImage MultiplexEncodedImagePacker::PackAndRelease(
@@ -195,7 +195,9 @@ MultiplexImage MultiplexEncodedImagePacker::Unpack(
   const MultiplexImageHeader& header = UnpackHeader(combined_image._buffer);
+
   MultiplexImage multiplex_image(header.image_index, header.component_count);
+
   std::vector<MultiplexImageComponentHeader> frame_headers;
   int header_offset = header.first_component_header_offset;
   while (header_offset > 0) {
@@ -211,7 +213,6 @@ MultiplexImage MultiplexEncodedImagePacker::Unpack(
     image_component.codec_type = frame_headers[i].codec_type;

     EncodedImage encoded_image = combined_image;
-    encoded_image._timeStamp = combined_image._timeStamp;
     encoded_image._frameType = frame_headers[i].frame_type;
     encoded_image._length = encoded_image._size =
         static_cast<size_t>(frame_headers[i].bitstream_length);
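For orientation, a minimal round-trip sketch of the packer API these hunks touch. It is not part of the CL: PackAndRelease's parameter list is cut off in the hunk above, so passing the MultiplexImage directly is an assumption, and yuv_component stands in for a filled-in MultiplexImageComponent; the static call style matches how the tests below invoke Unpack:

// Hypothetical usage sketch; PackAndRelease's exact signature is assumed.
MultiplexImage image(/*picture_index=*/0, /*frame_count=*/1);
image.image_components.push_back(yuv_component);  // one encoded component
EncodedImage combined = MultiplexEncodedImagePacker::PackAndRelease(image);
// Unpack restores the image index, component count, and component list.
MultiplexImage unpacked = MultiplexEncodedImagePacker::Unpack(combined);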

modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc

@@ -15,7 +15,6 @@
 #include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "media/base/mediaconstants.h"
 #include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
-#include "modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h"
 #include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
 #include "modules/video_coding/codecs/test/video_codec_test.h"
 #include "modules/video_coding/codecs/vp9/include/vp9.h"
@@ -66,21 +65,7 @@ class TestMultiplexAdapter : public VideoCodecTest {
         yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
         yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer));
     return rtc::WrapUnique<VideoFrame>(
-        new VideoFrame(yuva_buffer, 123 /* timestamp_us */,
-                       345 /* render_time_ms */, kVideoRotation_0));
-  }
-
-  std::unique_ptr<VideoFrame> ExtractAXXFrame(const VideoFrame& yuva_frame) {
-    const I420ABufferInterface* yuva_buffer =
-        yuva_frame.video_frame_buffer()->GetI420A();
-    rtc::scoped_refptr<I420BufferInterface> axx_buffer = WrapI420Buffer(
-        yuva_buffer->width(), yuva_buffer->height(), yuva_buffer->DataA(),
-        yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(),
-        yuva_buffer->DataV(), yuva_buffer->StrideV(),
-        rtc::KeepRefUntilDone(yuva_frame.video_frame_buffer()));
-    return rtc::WrapUnique<VideoFrame>(
-        new VideoFrame(axx_buffer, 123 /* timestamp_us */,
-                       345 /* render_time_ms */, kVideoRotation_0));
+        new VideoFrame(yuva_buffer, kVideoRotation_0, 0));
   }

  private:
@@ -125,6 +110,7 @@ TEST_F(TestMultiplexAdapter, EncodeDecodeI420Frame) {

   EncodedImage encoded_frame;
   CodecSpecificInfo codec_specific_info;
   ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
+
   EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
   EXPECT_EQ(
@@ -144,6 +130,7 @@ TEST_F(TestMultiplexAdapter, EncodeDecodeI420AFrame) {

   EncodedImage encoded_frame;
   CodecSpecificInfo codec_specific_info;
   ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
+
   EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
@@ -153,72 +140,6 @@ TEST_F(TestMultiplexAdapter, EncodeDecodeI420AFrame) {
   ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
   ASSERT_TRUE(decoded_frame);
   EXPECT_GT(I420PSNR(yuva_frame.get(), decoded_frame.get()), 36);
-
-  // Find PSNR for AXX bits.
-  std::unique_ptr<VideoFrame> input_axx_frame = ExtractAXXFrame(*yuva_frame);
-  std::unique_ptr<VideoFrame> output_axx_frame =
-      ExtractAXXFrame(*decoded_frame);
-  EXPECT_GT(I420PSNR(input_axx_frame.get(), output_axx_frame.get()), 47);
-}
-
-TEST_F(TestMultiplexAdapter, CheckSingleFrameEncodedBitstream) {
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            encoder_->Encode(*input_frame_, nullptr, nullptr));
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-  EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
-  EXPECT_EQ(0, codec_specific_info.codecSpecific.generic.simulcast_idx);
-
-  const MultiplexImage& unpacked_frame =
-      MultiplexEncodedImagePacker::Unpack(encoded_frame);
-  EXPECT_EQ(0, unpacked_frame.image_index);
-  EXPECT_EQ(1, unpacked_frame.component_count);
-  const MultiplexImageComponent& component = unpacked_frame.image_components[0];
-  EXPECT_EQ(0, component.component_index);
-  EXPECT_NE(nullptr, component.encoded_image._buffer);
-  EXPECT_EQ(kVideoFrameKey, component.encoded_image._frameType);
-}
-
-TEST_F(TestMultiplexAdapter, CheckDoubleFramesEncodedBitstream) {
-  std::unique_ptr<VideoFrame> yuva_frame = CreateI420AInputFrame();
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            encoder_->Encode(*yuva_frame, nullptr, nullptr));
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-  EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
-  EXPECT_EQ(0, codec_specific_info.codecSpecific.generic.simulcast_idx);
-
-  const MultiplexImage& unpacked_frame =
-      MultiplexEncodedImagePacker::Unpack(encoded_frame);
-  EXPECT_EQ(0, unpacked_frame.image_index);
-  EXPECT_EQ(2, unpacked_frame.component_count);
-  EXPECT_EQ(unpacked_frame.image_components.size(),
-            unpacked_frame.component_count);
-  for (int i = 0; i < unpacked_frame.component_count; ++i) {
-    const MultiplexImageComponent& component =
-        unpacked_frame.image_components[i];
-    EXPECT_EQ(i, component.component_index);
-    EXPECT_NE(nullptr, component.encoded_image._buffer);
-    EXPECT_EQ(kVideoFrameKey, component.encoded_image._frameType);
-  }
-}
-
-TEST_F(TestMultiplexAdapter, ImageIndexIncreases) {
-  std::unique_ptr<VideoFrame> yuva_frame = CreateI420AInputFrame();
-  const size_t expected_num_encoded_frames = 3;
-  for (size_t i = 0; i < expected_num_encoded_frames; ++i) {
-    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-              encoder_->Encode(*yuva_frame, nullptr, nullptr));
-    EncodedImage encoded_frame;
-    CodecSpecificInfo codec_specific_info;
-    ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-    const MultiplexImage& unpacked_frame =
-        MultiplexEncodedImagePacker::Unpack(encoded_frame);
-    EXPECT_EQ(i, unpacked_frame.image_index);
-    EXPECT_EQ(i ? kVideoFrameDelta : kVideoFrameKey, encoded_frame._frameType);
-  }
-}
 }

 }  // namespace webrtc