Rename EncodedImage::_length --> size_, and make private.

Use size() accessor function. Also replace most nearby uses of _buffer
with data().

Bug: webrtc:9378
Change-Id: I1ac3459612f7c6151bd057d05448da1c4e1c6e3d
Reviewed-on: https://webrtc-review.googlesource.com/c/116783
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26273}
This commit is contained in:
Niels Möller
2019-01-15 08:50:01 +01:00
committed by Commit Bot
parent 7491e8f17b
commit 77536a2b81
43 changed files with 153 additions and 164 deletions

View File

@@ -360,7 +360,7 @@ int32_t MediaCodecVideoDecoder::Decode(
ALOGE << "Decode() - callback_ is NULL";
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
if (inputImage._buffer == NULL && inputImage._length > 0) {
if (inputImage.data() == NULL && inputImage.size() > 0) {
ALOGE << "Decode() - inputImage is incorrect";
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
@@ -408,7 +408,7 @@ int32_t MediaCodecVideoDecoder::Decode(
}
key_frame_required_ = false;
}
if (inputImage._length == 0) {
if (inputImage.size() == 0) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -476,35 +476,34 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
RTC_CHECK(buffer) << "Indirect buffer??";
size_t buffer_capacity =
rtc::dchecked_cast<size_t>(jni->GetDirectBufferCapacity(j_input_buffer));
if (CheckException(jni) || buffer_capacity < inputImage._length) {
ALOGE << "Input frame size " << inputImage._length
if (CheckException(jni) || buffer_capacity < inputImage.size()) {
ALOGE << "Input frame size " << inputImage.size()
<< " is bigger than buffer size " << buffer_capacity;
return ProcessHWErrorOnCodecThread();
}
jlong presentation_timestamp_us = static_cast<jlong>(
static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate);
memcpy(buffer, inputImage._buffer, inputImage._length);
memcpy(buffer, inputImage.data(), inputImage.size());
if (frames_decoded_ < frames_decoded_logged_) {
ALOGD << "Decoder frame in # " << frames_received_
<< ". Type: " << inputImage._frameType << ". Buffer # "
<< j_input_buffer_index
<< ". TS: " << presentation_timestamp_us / 1000
<< ". Size: " << inputImage._length;
<< ". Size: " << inputImage.size();
}
// Save input image timestamps for later output.
frames_received_++;
current_bytes_ += inputImage._length;
current_bytes_ += inputImage.size();
absl::optional<uint8_t> qp;
if (codecType_ == kVideoCodecVP8) {
int qp_int;
if (vp8::GetQp(inputImage._buffer, inputImage._length, &qp_int)) {
if (vp8::GetQp(inputImage.data(), inputImage.size(), &qp_int)) {
qp = qp_int;
}
} else if (codecType_ == kVideoCodecH264) {
h264_bitstream_parser_.ParseBitstream(inputImage._buffer,
inputImage._length);
h264_bitstream_parser_.ParseBitstream(inputImage.data(), inputImage.size());
int qp_int;
if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) {
qp = qp_int;
@@ -515,7 +514,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
// Feed input to decoder.
bool success = Java_MediaCodecVideoDecoder_queueInputBuffer(
jni, j_media_codec_video_decoder_, j_input_buffer_index,
static_cast<int>(inputImage._length), presentation_timestamp_us,
static_cast<int>(inputImage.size()), presentation_timestamp_us,
static_cast<int64_t>(inputImage.Timestamp()), inputImage.ntp_time_ms_);
if (CheckException(jni) || !success) {
ALOGE << "queueInputBuffer error";

View File

@@ -1047,7 +1047,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
if (codec_type == kVideoCodecVP8 || codec_type == kVideoCodecVP9) {
header.VerifyAndAllocateFragmentationHeader(1);
header.fragmentationOffset[0] = 0;
header.fragmentationLength[0] = image->_length;
header.fragmentationLength[0] = image->size();
header.fragmentationPlType[0] = 0;
header.fragmentationTimeDiff[0] = 0;
if (codec_type == kVideoCodecVP8) {

View File

@@ -27,8 +27,8 @@ ScopedJavaLocalRef<jobject> NativeToJavaFrameType(JNIEnv* env,
ScopedJavaLocalRef<jobject> NativeToJavaEncodedImage(
JNIEnv* jni,
const EncodedImage& image) {
ScopedJavaLocalRef<jobject> buffer =
NewDirectByteBuffer(jni, image._buffer, image._length);
ScopedJavaLocalRef<jobject> buffer = NewDirectByteBuffer(
jni, const_cast<uint8_t*>(image.data()), image.size());
ScopedJavaLocalRef<jobject> frame_type =
NativeToJavaFrameType(jni, image._frameType);
ScopedJavaLocalRef<jobject> qp;

View File

@@ -237,21 +237,21 @@ absl::optional<uint8_t> VideoDecoderWrapper::ParseQP(
switch (codec_settings_.codecType) {
case kVideoCodecVP8: {
int qp_int;
if (vp8::GetQp(input_image._buffer, input_image._length, &qp_int)) {
if (vp8::GetQp(input_image.data(), input_image.size(), &qp_int)) {
qp = qp_int;
}
break;
}
case kVideoCodecVP9: {
int qp_int;
if (vp9::GetQp(input_image._buffer, input_image._length, &qp_int)) {
if (vp9::GetQp(input_image.data(), input_image.size(), &qp_int)) {
qp = qp_int;
}
break;
}
case kVideoCodecH264: {
h264_bitstream_parser_.ParseBitstream(input_image._buffer,
input_image._length);
h264_bitstream_parser_.ParseBitstream(input_image.data(),
input_image.size());
int qp_int;
if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) {
qp = qp_int;

View File

@@ -17,9 +17,9 @@
- (instancetype)initWithNativeEncodedImage:(webrtc::EncodedImage)encodedImage {
if (self = [super init]) {
// Wrap the buffer in NSData without copying, do not take ownership.
self.buffer = [NSData dataWithBytesNoCopy:encodedImage._buffer
length:encodedImage._length
freeWhenDone:NO];
self.buffer = [NSData dataWithBytesNoCopy:encodedImage.data()
length:encodedImage.size()
freeWhenDone:NO];
self.encodedWidth = rtc::dchecked_cast<int32_t>(encodedImage._encodedWidth);
self.encodedHeight = rtc::dchecked_cast<int32_t>(encodedImage._encodedHeight);
self.timeStamp = encodedImage.Timestamp();