Update test code to use EncodedImage::Allocate

Bug: webrtc:9378
Change-Id: I2ea63b097b0263b264fbbcca295365781fcae621
Reviewed-on: https://webrtc-review.googlesource.com/c/122780
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26690}
Niels Möller authored this commit on 2019-02-14 16:15:54 +01:00; committed by Commit Bot
commit 663844d800 (parent fd965c008c)
6 changed files with 30 additions and 43 deletions
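The pattern applied throughout these tests is the same everywhere: instead of pointing an EncodedImage at caller-owned memory via set_buffer(), the image allocates and owns its payload. A minimal before/after sketch (the function name, variable names, and include path are illustrative, not taken from this change):

#include <string.h>

#include "api/video/encoded_image.h"  // header path assumed

void AllocateMigrationSketch() {
  uint8_t payload[] = {1, 2, 3};

  // Old pattern: the EncodedImage only references caller-owned memory.
  webrtc::EncodedImage referencing_image;
  referencing_image.set_buffer(payload, sizeof(payload));
  referencing_image.set_size(sizeof(payload));

  // New pattern: the EncodedImage allocates and owns its payload buffer.
  webrtc::EncodedImage owning_image;
  owning_image.Allocate(sizeof(payload));
  memcpy(owning_image.data(), payload, sizeof(payload));
  owning_image.set_size(sizeof(payload));
}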

View File

@@ -103,9 +103,10 @@ TEST(LoopbackMediaTransport, VideoDeliveredToSink) {
thread->Start();
MediaTransportPair transport_pair(thread.get());
testing::StrictMock<MockMediaTransportVideoSinkInterface> sink;
uint8_t encoded_data[] = {1, 2, 3};
constexpr uint8_t encoded_data[] = {1, 2, 3};
EncodedImage encoded_image;
encoded_image.set_buffer(encoded_data, sizeof(encoded_data));
encoded_image.Allocate(sizeof(encoded_data));
memcpy(encoded_image.data(), encoded_data, sizeof(encoded_data));
encoded_image.set_size(sizeof(encoded_data));
EXPECT_CALL(sink, OnData(1, testing::Property(

View File

@@ -133,12 +133,13 @@ class RtpVideoSenderTestFixture {
} // namespace
TEST(RtpVideoSenderTest, SendOnOneModule) {
uint8_t payload = 'a';
constexpr uint8_t kPayload = 'a';
EncodedImage encoded_image;
encoded_image.SetTimestamp(1);
encoded_image.capture_time_ms_ = 2;
encoded_image._frameType = kVideoFrameKey;
encoded_image.set_buffer(&payload, 1);
encoded_image.Allocate(1);
encoded_image.data()[0] = kPayload;
encoded_image.set_size(1);
RtpVideoSenderTestFixture test({kSsrc1}, kPayloadType, {});
@@ -163,12 +164,13 @@ TEST(RtpVideoSenderTest, SendOnOneModule) {
}
TEST(RtpVideoSenderTest, SendSimulcastSetActive) {
uint8_t payload = 'a';
constexpr uint8_t kPayload = 'a';
EncodedImage encoded_image_1;
encoded_image_1.SetTimestamp(1);
encoded_image_1.capture_time_ms_ = 2;
encoded_image_1._frameType = kVideoFrameKey;
encoded_image_1.set_buffer(&payload, 1);
encoded_image_1.Allocate(1);
encoded_image_1.data()[0] = kPayload;
encoded_image_1.set_size(1);
RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, kPayloadType, {});
@@ -207,12 +209,13 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActive) {
// that outgoing data can be sent on this module, and checks that no data can
// be sent if both modules are inactive.
TEST(RtpVideoSenderTest, SendSimulcastSetActiveModules) {
uint8_t payload = 'a';
constexpr uint8_t kPayload = 'a';
EncodedImage encoded_image_1;
encoded_image_1.SetTimestamp(1);
encoded_image_1.capture_time_ms_ = 2;
encoded_image_1._frameType = kVideoFrameKey;
encoded_image_1.set_buffer(&payload, 1);
encoded_image_1.Allocate(1);
encoded_image_1.data()[0] = kPayload;
encoded_image_1.set_size(1);
EncodedImage encoded_image_2(encoded_image_1);
@@ -296,12 +299,13 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) {
RtpVideoSenderTestFixture test({kSsrc1}, kPayloadType, {}, &callback);
uint8_t payload = 'a';
constexpr uint8_t kPayload = 'a';
EncodedImage encoded_image;
encoded_image.SetTimestamp(1);
encoded_image.capture_time_ms_ = 2;
encoded_image._frameType = kVideoFrameKey;
encoded_image.set_buffer(&payload, 1);
encoded_image.Allocate(1);
encoded_image.data()[0] = kPayload;
encoded_image.set_size(1);
encoded_image._frameType = kVideoFrameKey;

View File

@@ -246,14 +246,6 @@ VideoProcessor::~VideoProcessor() {
// Sanity check.
RTC_CHECK_LE(input_frames_.size(), kMaxBufferedInputFrames);
// Deal with manual memory management of EncodedImage's.
for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) {
uint8_t* data = merged_encoded_frames_.at(i).data();
if (data) {
delete[] data;
}
}
}
void VideoProcessor::ProcessFrame() {
@@ -571,27 +563,19 @@ const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe(
const size_t buffer_size_bytes =
payload_size_bytes + EncodedImage::GetBufferPaddingBytes(codec);
uint8_t* copied_buffer = new uint8_t[buffer_size_bytes];
RTC_CHECK(copied_buffer);
EncodedImage copied_image = encoded_image;
copied_image.Allocate(buffer_size_bytes);
if (base_image.size()) {
RTC_CHECK(base_image.data());
memcpy(copied_buffer, base_image.data(), base_image.size());
memcpy(copied_image.data(), base_image.data(), base_image.size());
}
memcpy(copied_buffer + base_image.size(), encoded_image.data(),
memcpy(copied_image.data() + base_image.size(), encoded_image.data(),
encoded_image.size());
EncodedImage copied_image = encoded_image;
copied_image.set_buffer(copied_buffer, buffer_size_bytes);
copied_image.set_size(payload_size_bytes);
// Replace previous EncodedImage for this spatial layer.
uint8_t* old_buffer = merged_encoded_frames_.at(spatial_idx).buffer();
if (old_buffer) {
delete[] old_buffer;
}
merged_encoded_frames_.at(spatial_idx) = copied_image;
merged_encoded_frames_.at(spatial_idx) = std::move(copied_image);
return &merged_encoded_frames_.at(spatial_idx);
}
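Since the merged superframe buffer is now created with Allocate(), the EncodedImage owns its payload: the destructor's manual delete[] bookkeeping and the old_buffer cleanup above become unnecessary, and replacing the per-layer entry is a plain (moved) assignment. A condensed sketch of the resulting merge step, assuming it mirrors the hunk above and reusing the includes from the first sketch (the free-function signature is illustrative):

// Concatenates base_image and encoded_image into a single owning EncodedImage.
webrtc::EncodedImage MergeIntoSuperframeSketch(
    const webrtc::EncodedImage& base_image,
    const webrtc::EncodedImage& encoded_image,
    size_t buffer_size_bytes,
    size_t payload_size_bytes) {
  webrtc::EncodedImage copied_image = encoded_image;  // copy the metadata
  copied_image.Allocate(buffer_size_bytes);           // own a fresh buffer
  if (base_image.size()) {
    memcpy(copied_image.data(), base_image.data(), base_image.size());
  }
  memcpy(copied_image.data() + base_image.size(), encoded_image.data(),
         encoded_image.size());
  copied_image.set_size(payload_size_bytes);
  return copied_image;  // no raw buffer to delete[] when the previous frame is replaced
}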

View File

@@ -109,10 +109,9 @@ class VideoProcessor {
const webrtc::EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codec_specific_info)
: video_processor_(video_processor),
buffer_(encoded_image.data(), encoded_image.size()),
encoded_image_(encoded_image),
codec_specific_info_(*codec_specific_info) {
encoded_image_.set_buffer(buffer_.data(), buffer_.size());
encoded_image_.Retain();
}
bool Run() override {
@@ -122,7 +121,6 @@
private:
VideoProcessor* const video_processor_;
rtc::Buffer buffer_;
webrtc::EncodedImage encoded_image_;
const webrtc::CodecSpecificInfo codec_specific_info_;
};
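The callback task above no longer keeps a separate rtc::Buffer copy of the payload: copying the EncodedImage and then calling Retain() appears to give the copy ownership of the payload it references, so the data stays valid after the encoder's original buffer is gone. A minimal sketch of that pattern, assuming the same includes as the first sketch (the class and member names are illustrative):

class FrameEncodedTaskSketch {
 public:
  explicit FrameEncodedTaskSketch(const webrtc::EncodedImage& encoded_image)
      : encoded_image_(encoded_image) {
    // Make encoded_image_ own its payload so the task can run later, after
    // the encoder has reused or released the buffer it handed us.
    encoded_image_.Retain();
  }

  const webrtc::EncodedImage& image() const { return encoded_image_; }

 private:
  webrtc::EncodedImage encoded_image_;
};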

View File

@@ -82,7 +82,6 @@ std::vector<std::vector<FrameType>> GetTimingFrames(
{delay_ms, kDefaultOutlierFrameSizePercent});
callback.OnFrameRateChanged(kFramerate);
int s, i;
std::vector<uint8_t> frame_data(max_frame_size);
std::vector<std::vector<FrameType>> result(num_streams);
for (s = 0; s < num_streams; ++s)
callback.OnTargetBitrateChanged(average_frame_sizes[s] * kFramerate, s);
@@ -95,7 +94,7 @@ std::vector<std::vector<FrameType>> GetTimingFrames(
EncodedImage image;
CodecSpecificInfo codec_specific;
image.set_buffer(frame_data.data(), frame_data.size());
image.Allocate(max_frame_size);
image.set_size(FrameSize(min_frame_size, max_frame_size, s, i));
image.capture_time_ms_ = current_timestamp;
image.SetTimestamp(static_cast<uint32_t>(current_timestamp * 90));
@@ -189,9 +188,9 @@ TEST(TestVCMEncodedFrameCallback, NoTimingFrameIfNoEncodeStartTime) {
EncodedImage image;
CodecSpecificInfo codec_specific;
int64_t timestamp = 1;
uint8_t frame_data[500];
image.set_buffer(frame_data, sizeof(frame_data));
image.set_size(sizeof(frame_data));
constexpr size_t kFrameSize = 500;
image.Allocate(kFrameSize);
image.set_size(kFrameSize);
image.capture_time_ms_ = timestamp;
image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
codec_specific.codecType = kVideoCodecGeneric;
@@ -222,9 +221,9 @@ TEST(TestVCMEncodedFrameCallback, AdjustsCaptureTimeForInternalSourceEncoder) {
const int64_t kEncodeStartDelayMs = 2;
const int64_t kEncodeFinishDelayMs = 10;
int64_t timestamp = 1;
uint8_t frame_data[500];
image.set_buffer(frame_data, sizeof(frame_data));
image.set_size(sizeof(frame_data));
constexpr size_t kFrameSize = 500;
image.Allocate(kFrameSize);
image.set_size(kFrameSize);
image.capture_time_ms_ = timestamp;
image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
codec_specific.codecType = kVideoCodecGeneric;

View File

@@ -41,7 +41,8 @@ class IvfFileWriterTest : public ::testing::Test {
int num_frames,
bool use_capture_tims_ms) {
EncodedImage frame;
frame.set_buffer(dummy_payload, sizeof(dummy_payload));
frame.Allocate(sizeof(dummy_payload));
memcpy(frame.data(), dummy_payload, sizeof(dummy_payload));
frame._encodedWidth = width;
frame._encodedHeight = height;
for (int i = 1; i <= num_frames; ++i) {