Delete RawVideoType enum, use the VideoType enum instead.

BUG=webrtc:7385

Review-Url: https://codereview.webrtc.org/2765243002
Cr-Commit-Position: refs/heads/master@{#17930}
This commit is contained in:
nisse
2017-04-28 07:18:05 -07:00
committed by Commit bot
parent aec49d2b49
commit eb44b39a21
31 changed files with 261 additions and 309 deletions

View File

@@ -107,8 +107,8 @@ static void RtpFragmentize(EncodedImage* encoded_image,
// should be more than enough to hold any encoded data of future frames of
// the same size (avoiding possible future reallocation due to variations in
// required size).
encoded_image->_size =
CalcBufferSize(kI420, frame_buffer.width(), frame_buffer.height());
encoded_image->_size = CalcBufferSize(
VideoType::kI420, frame_buffer.width(), frame_buffer.height());
if (encoded_image->_size < required_size) {
// Encoded data > unencoded data. Allocate required bytes.
LOG(LS_WARNING) << "Encoding produced more bytes than the original image "
@@ -254,8 +254,8 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* codec_settings,
&video_format);
// Initialize encoded image. Default buffer size: size of unencoded data.
encoded_image_._size =
CalcBufferSize(kI420, codec_settings->width, codec_settings->height);
encoded_image_._size = CalcBufferSize(VideoType::kI420, codec_settings->width,
codec_settings->height);
encoded_image_._buffer = new uint8_t[encoded_image_._size];
encoded_image_buffer_.reset(encoded_image_._buffer);
encoded_image_._completeFrame = true;

View File

@@ -57,9 +57,9 @@ int I420Encoder::InitEncode(const VideoCodec* codecSettings,
_encodedImage._buffer = NULL;
_encodedImage._size = 0;
}
const size_t newSize =
CalcBufferSize(kI420, codecSettings->width, codecSettings->height) +
kI420HeaderSize;
const size_t newSize = CalcBufferSize(VideoType::kI420, codecSettings->width,
codecSettings->height) +
kI420HeaderSize;
uint8_t* newBuffer = new uint8_t[newSize];
if (newBuffer == NULL) {
return WEBRTC_VIDEO_CODEC_MEMORY;
@@ -96,9 +96,9 @@ int I420Encoder::Encode(const VideoFrame& inputImage,
return WEBRTC_VIDEO_CODEC_ERR_SIZE;
}
size_t req_length =
CalcBufferSize(kI420, inputImage.width(), inputImage.height()) +
kI420HeaderSize;
size_t req_length = CalcBufferSize(VideoType::kI420, inputImage.width(),
inputImage.height()) +
kI420HeaderSize;
if (_encodedImage._size > req_length) {
// Reallocate buffer.
delete[] _encodedImage._buffer;
@@ -193,7 +193,8 @@ int I420Decoder::Decode(const EncodedImage& inputImage,
_height = height;
// Verify that the available length is sufficient:
size_t req_length = CalcBufferSize(kI420, _width, _height) + kI420HeaderSize;
size_t req_length =
CalcBufferSize(VideoType::kI420, _width, _height) + kI420HeaderSize;
if (req_length > inputImage._length) {
return WEBRTC_VIDEO_CODEC_ERROR;
@@ -204,7 +205,7 @@ int I420Decoder::Decode(const EncodedImage& inputImage,
I420Buffer::Create(_width, _height, _width, half_width, half_width);
// Converting from raw buffer I420Buffer.
int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
int ret = ConvertToI420(VideoType::kI420, buffer, 0, 0, _width, _height, 0,
kVideoRotation_0, frame_buffer.get());
if (ret < 0) {
return WEBRTC_VIDEO_CODEC_MEMORY;

View File

@@ -251,7 +251,8 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
}
if (source_frame_writer_) {
size_t length = CalcBufferSize(kI420, buffer->width(), buffer->height());
size_t length =
CalcBufferSize(VideoType::kI420, buffer->width(), buffer->height());
rtc::Buffer extracted_buffer(length);
int extracted_length =
ExtractBuffer(buffer, length, extracted_buffer.data());
@@ -467,14 +468,15 @@ void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
scaled_buffer->ScaleFrom(*image.video_frame_buffer());
}
size_t length =
CalcBufferSize(kI420, scaled_buffer->width(), scaled_buffer->height());
size_t length = CalcBufferSize(VideoType::kI420, scaled_buffer->width(),
scaled_buffer->height());
extracted_buffer.SetSize(length);
extracted_length =
ExtractBuffer(scaled_buffer, length, extracted_buffer.data());
} else {
// No resize.
size_t length = CalcBufferSize(kI420, image.width(), image.height());
size_t length =
CalcBufferSize(VideoType::kI420, image.width(), image.height());
extracted_buffer.SetSize(length);
if (image.video_frame_buffer()->native_handle()) {
extracted_length =

View File

@@ -248,7 +248,7 @@ class VideoProcessorIntegrationTest : public testing::Test {
test::OutputPath(), "videoprocessor_integrationtest");
config_.frame_length_in_bytes =
CalcBufferSize(kI420, process.width, process.height);
CalcBufferSize(VideoType::kI420, process.width, process.height);
config_.verbose = process.verbose_logging;
config_.use_single_core = process.use_single_core;
// Key frame interval and packet loss are set for each test.

View File

@@ -198,7 +198,7 @@ class TestVp8Impl : public ::testing::Test {
int64_t startTime = rtc::TimeMillis();
while (rtc::TimeMillis() - startTime < kMaxWaitDecTimeMs) {
if (decode_complete_callback_->DecodeComplete()) {
return CalcBufferSize(kI420, decoded_frame_->width(),
return CalcBufferSize(VideoType::kI420, decoded_frame_->width(),
decoded_frame_->height());
}
}

View File

@@ -368,7 +368,7 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
delete[] encoded_images_[i]._buffer;
}
encoded_images_[i]._size =
CalcBufferSize(kI420, codec_.width, codec_.height);
CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size];
encoded_images_[i]._completeFrame = true;
}

View File

@@ -284,7 +284,8 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
if (encoded_image_._buffer != NULL) {
delete[] encoded_image_._buffer;
}
encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height);
encoded_image_._size =
CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
encoded_image_._buffer = new uint8_t[encoded_image_._size];
encoded_image_._completeFrame = true;
// Creating a wrapper to the image - setting image data to NULL. Actual