Rename EncodedImage::_length --> size_, and make private.

Use the size() accessor function instead. Also replace most nearby uses
of _buffer with data().

Bug: webrtc:9378
Change-Id: I1ac3459612f7c6151bd057d05448da1c4e1c6e3d
Reviewed-on: https://webrtc-review.googlesource.com/c/116783
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26273}
Author: Niels Möller, committed by Commit Bot, 2019-01-15 08:50:01 +01:00
commit 77536a2b81, parent 7491e8f17b
43 changed files with 153 additions and 164 deletions
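
For readers skimming the hunks below, this is the usage pattern the change moves callers to: read the payload through data() and size() instead of the public _buffer and _length fields, and set the payload length through set_size(). The snippet is a minimal, self-contained sketch; SimpleEncodedImage and CopyPayload are illustrative stand-ins, not the real webrtc::EncodedImage API, and only mirror the accessors that actually appear in the diff (set_buffer(), set_size(), data(), size()).

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Simplified stand-in for webrtc::EncodedImage, for illustration only.
// It mirrors the accessors used in the hunks below: a buffer/capacity
// setter plus data(), size() and set_size().
class SimpleEncodedImage {
 public:
  void set_buffer(uint8_t* buffer, size_t capacity) {
    buffer_ = buffer;
    capacity_ = capacity;
  }
  void set_size(size_t size) { size_ = size; }

  uint8_t* data() { return buffer_; }
  const uint8_t* data() const { return buffer_; }
  size_t size() const { return size_; }
  size_t capacity() const { return capacity_; }

 private:
  uint8_t* buffer_ = nullptr;  // not owned
  size_t capacity_ = 0;        // allocated bytes
  size_t size_ = 0;            // used bytes; was the public _length field
};

// Copy the payload using only the accessors; callers no longer touch
// _buffer or _length directly.
std::vector<uint8_t> CopyPayload(const SimpleEncodedImage& image) {
  return std::vector<uint8_t>(image.data(), image.data() + image.size());
}

int main() {
  uint8_t storage[16] = {1, 2, 3, 4};
  SimpleEncodedImage image;
  image.set_buffer(storage, sizeof(storage));  // capacity of the buffer
  image.set_size(4);                           // bytes actually in use

  std::vector<uint8_t> payload = CopyPayload(image);
  std::cout << "used " << payload.size() << " of " << image.capacity()
            << " bytes\n";  // prints: used 4 of 16 bytes
  return 0;
}

Note the capacity/size split: set_buffer() records how much storage exists, set_size() how much of it holds encoded data. The superframe hunk below relies on exactly that distinction when it allocates payload_size_bytes plus GetBufferPaddingBytes(codec) of capacity but sets the size to payload_size_bytes only.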


@@ -320,8 +320,7 @@ void VideoCodecTestFixtureImpl::H264KeyframeChecker::CheckEncodedFrame(
   bool contains_pps = false;
   bool contains_idr = false;
   const std::vector<webrtc::H264::NaluIndex> nalu_indices =
-      webrtc::H264::FindNaluIndices(encoded_frame._buffer,
-                                    encoded_frame._length);
+      webrtc::H264::FindNaluIndices(encoded_frame.data(), encoded_frame.size());
   for (const webrtc::H264::NaluIndex& index : nalu_indices) {
     webrtc::H264::NaluType nalu_type = webrtc::H264::ParseNaluType(
         encoded_frame._buffer[index.payload_start_offset]);


@@ -45,11 +45,9 @@ class QpFrameChecker : public VideoCodecTestFixture::EncodedFrameChecker {
                          const EncodedImage& encoded_frame) const override {
     int qp;
     if (codec == kVideoCodecVP8) {
-      EXPECT_TRUE(
-          vp8::GetQp(encoded_frame._buffer, encoded_frame._length, &qp));
+      EXPECT_TRUE(vp8::GetQp(encoded_frame.data(), encoded_frame.size(), &qp));
     } else if (codec == kVideoCodecVP9) {
-      EXPECT_TRUE(
-          vp9::GetQp(encoded_frame._buffer, encoded_frame._length, &qp));
+      EXPECT_TRUE(vp9::GetQp(encoded_frame.data(), encoded_frame.size(), &qp));
     } else {
       RTC_NOTREACHED();
     }


@@ -51,8 +51,7 @@ size_t GetMaxNaluSizeBytes(const EncodedImage& encoded_frame,
     return 0;
 
   std::vector<webrtc::H264::NaluIndex> nalu_indices =
-      webrtc::H264::FindNaluIndices(encoded_frame._buffer,
-                                    encoded_frame._length);
+      webrtc::H264::FindNaluIndices(encoded_frame.data(), encoded_frame.size());
 
   RTC_CHECK(!nalu_indices.empty());
@@ -392,7 +391,7 @@ void VideoProcessor::FrameEncoded(
       frame_stat->encode_start_ns, encode_stop_ns - post_encode_time_ns_);
   frame_stat->target_bitrate_kbps =
       bitrate_allocation_.GetTemporalLayerSum(spatial_idx, temporal_idx) / 1000;
-  frame_stat->length_bytes = encoded_image._length;
+  frame_stat->length_bytes = encoded_image.size();
   frame_stat->frame_type = encoded_image._frameType;
   frame_stat->temporal_idx = temporal_idx;
   frame_stat->max_nalu_size_bytes = GetMaxNaluSizeBytes(encoded_image, config_);
@@ -554,7 +553,7 @@ const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe(
   RTC_CHECK_GT(config_.NumberOfSpatialLayers(), 1);
 
   EncodedImage base_image;
-  RTC_CHECK_EQ(base_image._length, 0);
+  RTC_CHECK_EQ(base_image.size(), 0);
 
   // Each SVC layer is decoded with dedicated decoder. Find the nearest
   // non-dropped base frame and merge it and current frame into superframe.
@@ -568,29 +567,29 @@ const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe(
       }
     }
   }
 
-  const size_t payload_size_bytes = base_image._length + encoded_image._length;
+  const size_t payload_size_bytes = base_image.size() + encoded_image.size();
   const size_t buffer_size_bytes =
       payload_size_bytes + EncodedImage::GetBufferPaddingBytes(codec);
   uint8_t* copied_buffer = new uint8_t[buffer_size_bytes];
   RTC_CHECK(copied_buffer);
-  if (base_image._length) {
+  if (base_image.size()) {
     RTC_CHECK(base_image._buffer);
-    memcpy(copied_buffer, base_image._buffer, base_image._length);
+    memcpy(copied_buffer, base_image.data(), base_image.size());
   }
-  memcpy(copied_buffer + base_image._length, encoded_image._buffer,
-         encoded_image._length);
+  memcpy(copied_buffer + base_image.size(), encoded_image.data(),
+         encoded_image.size());
 
   EncodedImage copied_image = encoded_image;
   copied_image = encoded_image;
   copied_image.set_buffer(copied_buffer, buffer_size_bytes);
-  copied_image._length = payload_size_bytes;
+  copied_image.set_size(payload_size_bytes);
 
   // Replace previous EncodedImage for this spatial layer.
-  uint8_t* old_buffer = merged_encoded_frames_.at(spatial_idx)._buffer;
-  if (old_buffer) {
-    delete[] old_buffer;
+  uint8_t* old_data = merged_encoded_frames_.at(spatial_idx).data();
+  if (old_data) {
+    delete[] old_data;
   }
 
   merged_encoded_frames_.at(spatial_idx) = copied_image;


@@ -109,7 +109,7 @@ class VideoProcessor {
                       const webrtc::EncodedImage& encoded_image,
                       const webrtc::CodecSpecificInfo* codec_specific_info)
         : video_processor_(video_processor),
-          buffer_(encoded_image._buffer, encoded_image._length),
+          buffer_(encoded_image._buffer, encoded_image.size()),
           encoded_image_(encoded_image),
           codec_specific_info_(*codec_specific_info) {
       encoded_image_._buffer = buffer_.data();