Revert of Delete unused and almost unused frame-related methods. (patchset #12 id:220001 of https://codereview.webrtc.org/2065733003/ )

Reason for revert:
Breaks downstream applications that inherit webrtc::VideoFrameBuffer and try to override the deleted methods data(), stride(), and MutableData().
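
Concretely (an illustrative sketch; the class below is hypothetical, not from any real downstream project), a subclass like the following stops compiling once those virtuals are gone, because its override specifiers no longer match any base-class method:

  // Hypothetical downstream subclass. With the original CL applied, each
  // of these three overrides is a compile error: webrtc::VideoFrameBuffer
  // no longer declares a virtual data()/stride()/MutableData().
  class MyFrameBuffer : public webrtc::VideoFrameBuffer {
    const uint8_t* data(webrtc::PlaneType type) const override;
    int stride(webrtc::PlaneType type) const override;
    uint8_t* MutableData(webrtc::PlaneType type) override;
    // ...remaining members unchanged...
  };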

Original issue's description:
> Delete unused and almost unused frame-related methods.
>
> webrtc::VideoFrame::set_video_frame_buffer
> webrtc::VideoFrame::ConvertNativeToI420Frame
>
> cricket::WebRtcVideoFrame::InitToBlack
>
> VideoFrameBuffer::data
> VideoFrameBuffer::stride
> VideoFrameBuffer::MutableData
>
> TBR=tkchin@webrtc.org # Refactoring affecting RTCVideoFrame
> BUG=webrtc:5682
>
> Committed: https://crrev.com/76270de4bc2dac188f10f805e6e2fb86693ef864
> Cr-Commit-Position: refs/heads/master@{#13183}

TBR=perkj@webrtc.org,pbos@webrtc.org,marpan@webrtc.org,tkchin@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5682

Review-Url: https://codereview.webrtc.org/2076113002
Cr-Commit-Position: refs/heads/master@{#13184}
Author: nisse
Committed: 2016-06-17 02:55:14 -07:00 by Commit bot
Commit: 72e735d386 (parent: 76270de4bc)
16 changed files with 174 additions and 92 deletions


@@ -794,12 +794,12 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
     libyuv::I420Copy(y_ptr, stride,
                      u_ptr, uv_stride,
                      v_ptr, uv_stride,
-                     frame_buffer->MutableDataY(),
-                     frame_buffer->StrideY(),
-                     frame_buffer->MutableDataU(),
-                     frame_buffer->StrideU(),
-                     frame_buffer->MutableDataV(),
-                     frame_buffer->StrideV(),
+                     frame_buffer->MutableData(webrtc::kYPlane),
+                     frame_buffer->stride(webrtc::kYPlane),
+                     frame_buffer->MutableData(webrtc::kUPlane),
+                     frame_buffer->stride(webrtc::kUPlane),
+                     frame_buffer->MutableData(webrtc::kVPlane),
+                     frame_buffer->stride(webrtc::kVPlane),
                      width, height);
   } else {
     // All other supported formats are nv12.
@@ -808,12 +808,12 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
     libyuv::NV12ToI420(
         y_ptr, stride,
         uv_ptr, stride,
-        frame_buffer->MutableDataY(),
-        frame_buffer->StrideY(),
-        frame_buffer->MutableDataU(),
-        frame_buffer->StrideU(),
-        frame_buffer->MutableDataV(),
-        frame_buffer->StrideV(),
+        frame_buffer->MutableData(webrtc::kYPlane),
+        frame_buffer->stride(webrtc::kYPlane),
+        frame_buffer->MutableData(webrtc::kUPlane),
+        frame_buffer->stride(webrtc::kUPlane),
+        frame_buffer->MutableData(webrtc::kVPlane),
+        frame_buffer->stride(webrtc::kVPlane),
         width, height);
   }
   // Return output byte buffer back to codec.
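
Why the callers go back to the deprecated spellings (a hedged aside, not part of the patch): the restored MutableData(PlaneType) forwards to the per-plane methods, whose base implementations are RTC_NOTREACHED() stubs (see the video_frame_buffer.cc hunk below). A downstream buffer that overrides only MutableData(PlaneType) answers MutableData(webrtc::kYPlane) directly, but a direct MutableDataY() call on it would land in the stub. For WebRTC's own I420Buffer the two spellings are interchangeable; a minimal sketch:

  // Sketch only: on an I420Buffer, the deprecated and per-plane accessors
  // reach the same memory, so the caller-side revert is behavior-neutral.
  rtc::scoped_refptr<webrtc::I420Buffer> fb(
      new rtc::RefCountedObject<webrtc::I420Buffer>(640, 480));
  RTC_DCHECK_EQ(fb->MutableData(webrtc::kYPlane), fb->MutableDataY());
  RTC_DCHECK_EQ(fb->stride(webrtc::kUPlane), fb->StrideU());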


@@ -670,8 +670,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
   }
   consecutive_full_queue_frame_drops_ = 0;

-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
-      frame.video_frame_buffer());
+  VideoFrame input_frame = frame;
   if (scale_) {
     // Check framerate before spatial resolution change.
     quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
@@ -679,22 +678,21 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
         quality_scaler_.GetScaledResolution();
     if (scaled_resolution.width != frame.width() ||
         scaled_resolution.height != frame.height()) {
-      if (input_buffer->native_handle() != nullptr) {
-        input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
-                           ->CropScaleAndRotate(frame.width(), frame.height(),
-                                                0, 0,
-                                                scaled_resolution.width,
-                                                scaled_resolution.height,
-                                                webrtc::kVideoRotation_0);
+      if (frame.video_frame_buffer()->native_handle() != nullptr) {
+        rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+            static_cast<AndroidTextureBuffer*>(
+                frame.video_frame_buffer().get())->CropScaleAndRotate(
+                    frame.width(), frame.height(), 0, 0,
+                    scaled_resolution.width, scaled_resolution.height,
+                    webrtc::kVideoRotation_0));
+        input_frame.set_video_frame_buffer(scaled_buffer);
       } else {
-        input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
+        input_frame.set_video_frame_buffer(
+            quality_scaler_.GetScaledBuffer(frame.video_frame_buffer()));
       }
     }
   }

-  VideoFrame input_frame(input_buffer, frame.timestamp(),
-                         frame.render_time_ms(), frame.rotation());
   if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
     ALOGE << "Failed to reconfigure encoder.";
     return WEBRTC_VIDEO_CODEC_ERROR;


@@ -215,10 +215,10 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
   libyuv::NV12ToI420Rotate(
       y_plane + width * crop_y + crop_x, width,
       uv_plane + uv_width * crop_y + crop_x, width,
-      buffer->MutableDataY(), buffer->StrideY(),
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
       // Swap U and V, since we have NV21, not NV12.
-      buffer->MutableDataV(), buffer->StrideV(),
-      buffer->MutableDataU(), buffer->StrideU(),
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
       crop_width, crop_height, static_cast<libyuv::RotationMode>(
           capturer_->apply_rotation() ? rotation : 0));


@@ -46,9 +46,9 @@ CoreVideoFrameBuffer::NativeToI420Buffer() {
   int src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 1);
   int ret = libyuv::NV12ToI420(
       src_y, src_y_stride, src_uv, src_uv_stride,
-      buffer->MutableDataY(), buffer->StrideY(),
-      buffer->MutableDataU(), buffer->StrideU(),
-      buffer->MutableDataV(), buffer->StrideV(),
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
      width, height);
   CVPixelBufferUnlockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
   if (ret) {


@@ -48,20 +48,27 @@ class VideoFrameBuffer : public rtc::RefCountInterface {
   // Returns pointer to the pixel data for a given plane. The memory is owned by
   // the VideoFrameBuffer object and must not be freed by the caller.
-  virtual const uint8_t* DataY() const = 0;
-  virtual const uint8_t* DataU() const = 0;
-  virtual const uint8_t* DataV() const = 0;
+  virtual const uint8_t* DataY() const;
+  virtual const uint8_t* DataU() const;
+  virtual const uint8_t* DataV() const;
+  // Deprecated method.
+  // TODO(nisse): Delete after all users are updated.
+  virtual const uint8_t* data(PlaneType type) const;

   // TODO(nisse): Move MutableData methods to the I420Buffer subclass.
   // Non-const data access.
   virtual uint8_t* MutableDataY();
   virtual uint8_t* MutableDataU();
   virtual uint8_t* MutableDataV();
+  // Deprecated method. TODO(nisse): Delete after all users are updated.
+  virtual uint8_t* MutableData(PlaneType type);

   // Returns the number of bytes between successive rows for a given plane.
-  virtual int StrideY() const = 0;
-  virtual int StrideU() const = 0;
-  virtual int StrideV() const = 0;
+  virtual int StrideY() const;
+  virtual int StrideU() const;
+  virtual int StrideV() const;
+  // Deprecated method. TODO(nisse): Delete after all users are updated.
+  virtual int stride(PlaneType type) const;

   // Return the handle of the underlying video frame. This is used when the
   // frame is backed by a texture.
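
What this buys downstream code (a hedged sketch; the class below is hypothetical): with the per-plane accessors no longer pure and the deprecated ones declared again, a pre-existing subclass that only overrides the PlaneType-keyed methods keeps compiling and working, because the default implementations route each family to the other.

  // Hypothetical legacy subclass: overrides only the deprecated accessors,
  // yet DataY()/StrideY() etc. still work on it via the restored defaults.
  class LegacyFrameBuffer : public webrtc::VideoFrameBuffer {
   public:
    LegacyFrameBuffer(int width, int height) : width_(width), height_(height) {}
    int width() const override { return width_; }
    int height() const override { return height_; }
    const uint8_t* data(webrtc::PlaneType type) const override {
      return planes_[type];   // one base pointer per plane
    }
    int stride(webrtc::PlaneType type) const override {
      return strides_[type];  // bytes per row for that plane
    }
    void* native_handle() const override { return nullptr; }
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override {
      return this;  // already memory-backed
    }

   protected:
    ~LegacyFrameBuffer() override {}

   private:
    const uint8_t* planes_[3] = {nullptr, nullptr, nullptr};
    int strides_[3] = {0, 0, 0};
    const int width_;
    const int height_;
  };

As elsewhere in this patch, such a buffer would be instantiated through the ref-counting wrapper, e.g. new rtc::RefCountedObject<LegacyFrameBuffer>(w, h).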


@@ -155,6 +155,20 @@ const rtc::scoped_refptr<VideoFrameBuffer>& VideoFrame::video_frame_buffer()
   return video_frame_buffer_;
 }

+void VideoFrame::set_video_frame_buffer(
+    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
+  RTC_DCHECK(buffer);
+  video_frame_buffer_ = buffer;
+}
+
+VideoFrame VideoFrame::ConvertNativeToI420Frame() const {
+  RTC_DCHECK(video_frame_buffer_->native_handle());
+  VideoFrame frame;
+  frame.ShallowCopy(*this);
+  frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer());
+  return frame;
+}
+
 size_t EncodedImage::GetBufferPaddingBytes(VideoCodecType codec_type) {
   switch (codec_type) {
     case kVideoCodecVP8:
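
A short usage sketch for the restored helper (hedged; it mirrors the video_sender.cc hunk further down): software-only consumers normalize texture-backed frames before touching pixel data.

  // Sketch: convert a texture-backed frame to a memory-backed I420 frame.
  // Must not be called on a frame that is already memory-backed.
  if (converted_frame.video_frame_buffer()->native_handle() != nullptr) {
    converted_frame = converted_frame.ConvertNativeToI420Frame();
    RTC_CHECK(!converted_frame.IsZeroSize())
        << "Frame conversion failed, won't be able to encode frame.";
  }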


@@ -30,6 +30,54 @@ int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {

 }  // namespace

+const uint8_t* VideoFrameBuffer::data(PlaneType type) const {
+  switch (type) {
+    case kYPlane:
+      return DataY();
+    case kUPlane:
+      return DataU();
+    case kVPlane:
+      return DataV();
+    default:
+      RTC_NOTREACHED();
+      return nullptr;
+  }
+}
+
+const uint8_t* VideoFrameBuffer::DataY() const {
+  return data(kYPlane);
+}
+const uint8_t* VideoFrameBuffer::DataU() const {
+  return data(kUPlane);
+}
+const uint8_t* VideoFrameBuffer::DataV() const {
+  return data(kVPlane);
+}
+
+int VideoFrameBuffer::stride(PlaneType type) const {
+  switch (type) {
+    case kYPlane:
+      return StrideY();
+    case kUPlane:
+      return StrideU();
+    case kVPlane:
+      return StrideV();
+    default:
+      RTC_NOTREACHED();
+      return 0;
+  }
+}
+
+int VideoFrameBuffer::StrideY() const {
+  return stride(kYPlane);
+}
+int VideoFrameBuffer::StrideU() const {
+  return stride(kUPlane);
+}
+int VideoFrameBuffer::StrideV() const {
+  return stride(kVPlane);
+}
+
 uint8_t* VideoFrameBuffer::MutableDataY() {
   RTC_NOTREACHED();
   return nullptr;
@@ -43,6 +91,20 @@ uint8_t* VideoFrameBuffer::MutableDataV() {
   return nullptr;
 }

+uint8_t* VideoFrameBuffer::MutableData(PlaneType type) {
+  switch (type) {
+    case kYPlane:
+      return MutableDataY();
+    case kUPlane:
+      return MutableDataU();
+    case kVPlane:
+      return MutableDataV();
+    default:
+      RTC_NOTREACHED();
+      return nullptr;
+  }
+}
+
 VideoFrameBuffer::~VideoFrameBuffer() {}

 I420Buffer::I420Buffer(int width, int height)
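
Two properties of these defaults are easy to miss (an aside, not part of the patch). The read accessors dispatch both ways, so an implementation must override at least one complete family, either the per-plane methods or the PlaneType-keyed ones; a subclass that overrides neither recurses between them until the stack overflows:

  // Dispatch cycle if a subclass overrides neither read family (sketch):
  //   DataY()        -> data(kYPlane)   // default above
  //   data(kYPlane)  -> DataY()         // default above; and so on, forever

By contrast, the MutableDataY/U/V defaults are RTC_NOTREACHED() stubs rather than dispatchers, so writable access exists only on buffers that actually own their memory, such as I420Buffer.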


@@ -65,6 +65,20 @@ bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh,
                 frame->rotation, apply_rotation);
 }

+// TODO(nisse): Deprecated, delete as soon as Chrome is updated.
+bool WebRtcVideoFrame::InitToBlack(int w, int h,
+                                   int64_t time_stamp_ns) {
+  rtc::scoped_refptr<webrtc::I420Buffer> buffer(
+      new rtc::RefCountedObject<webrtc::I420Buffer>(w, h));
+  buffer->SetToBlack();
+  video_frame_buffer_ = buffer;
+  SetTimeStamp(time_stamp_ns);
+  rotation_ = webrtc::kVideoRotation_0;
+  return true;
+}
+
 int WebRtcVideoFrame::width() const {
   return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
 }
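
For reference, "black" for an I420 buffer is not all-zero memory: SetToBlack writes luma 0 and chroma 128, since U = V = 128 is the neutral chroma value. A minimal equivalent fill, assuming a writable I420Buffer (a sketch, not the actual implementation):

  // Fill an I420 buffer with black: Y = 0, U = V = 128.
  // memset is from <cstring>; chroma planes are half height, rounded up.
  memset(buffer->MutableDataY(), 0, buffer->StrideY() * buffer->height());
  memset(buffer->MutableDataU(), 128,
         buffer->StrideU() * ((buffer->height() + 1) / 2));
  memset(buffer->MutableDataV(), 128,
         buffer->StrideV() * ((buffer->height() + 1) / 2));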


@@ -63,6 +63,9 @@ class WebRtcVideoFrame : public VideoFrame {
   void InitToEmptyBuffer(int w, int h);
   void InitToEmptyBuffer(int w, int h, int64_t time_stamp_ns);

+  // TODO(nisse): Deprecated, delete as soon as Chrome is updated.
+  bool InitToBlack(int w, int h, int64_t time_stamp_ns);
+
   int width() const override;
   int height() const override;


@@ -123,16 +123,11 @@ int H264DecoderImpl::AVGetBuffer2(
   // The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
   // of a video frame and will be set up to reference |video_frame|'s buffers.
-  // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
-  // Refactor to do not use a VideoFrame object at all.
+  VideoFrame* video_frame = new VideoFrame();

   // FFmpeg expects the initial allocation to be zero-initialized according to
   // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
-  VideoFrame* video_frame = new VideoFrame(
-      decoder->pool_.CreateBuffer(width, height),
-      0 /* timestamp */, 0 /* render_time_ms */, kVideoRotation_0);
+  video_frame->set_video_frame_buffer(
+      decoder->pool_.CreateBuffer(width, height));

   // DCHECK that we have a continuous buffer as is required.
   RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
                 video_frame->video_frame_buffer()->DataY() +
@@ -360,30 +355,22 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
                 video_frame->video_frame_buffer()->DataV());
   video_frame->set_timestamp(input_image._timeStamp);

-  int32_t ret;
-
   // The decoded image may be larger than what is supposed to be visible, see
   // |AVGetBuffer2|'s use of |avcodec_align_dimensions|. This crops the image
   // without copying the underlying buffer.
   rtc::scoped_refptr<VideoFrameBuffer> buf = video_frame->video_frame_buffer();
   if (av_frame_->width != buf->width() || av_frame_->height != buf->height()) {
-    rtc::scoped_refptr<VideoFrameBuffer> cropped_buf(
+    video_frame->set_video_frame_buffer(
         new rtc::RefCountedObject<WrappedI420Buffer>(
             av_frame_->width, av_frame_->height,
             buf->DataY(), buf->StrideY(),
             buf->DataU(), buf->StrideU(),
             buf->DataV(), buf->StrideV(),
             rtc::KeepRefUntilDone(buf)));
-    VideoFrame cropped_frame(
-        cropped_buf, video_frame->timestamp(), video_frame->render_time_ms(),
-        video_frame->rotation());
-    // TODO(nisse): Timestamp and rotation are all zero here. Change decoder
-    // interface to pass a VideoFrameBuffer instead of a VideoFrame?
-    ret = decoded_image_callback_->Decoded(cropped_frame);
-  } else {
-    // Return decoded frame.
-    ret = decoded_image_callback_->Decoded(*video_frame);
   }

+  // Return decoded frame.
+  int32_t ret = decoded_image_callback_->Decoded(*video_frame);
   // Stop referencing it, possibly freeing |video_frame|.
   av_frame_unref(av_frame_.get());
   video_frame = nullptr;
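
Background on the cropping trick above (hedged): WrappedI420Buffer re-exposes existing plane memory under new dimensions without copying, and rtc::KeepRefUntilDone ties the original buffer's lifetime to the wrapper. Cropping the aligned decode target down to the visible region is then just reporting smaller width/height while keeping the original pointers and strides:

  // Sketch: alias |buf| as a visible_width x visible_height frame, no copy.
  // |visible_width|/|visible_height| are hypothetical names for the region
  // to expose; strides stay those of the larger allocation.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> cropped(
      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
          visible_width, visible_height,
          buf->DataY(), buf->StrideY(),
          buf->DataU(), buf->StrideU(),
          buf->DataV(), buf->StrideV(),
          rtc::KeepRefUntilDone(buf)));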


@@ -960,9 +960,9 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
       // release |img_buffer|.
       rtc::KeepRefUntilDone(img_buffer)));

-  VideoFrame decoded_image(img_wrapped_buffer, timestamp,
-                           0 /* render_time_ms */, webrtc::kVideoRotation_0);
+  VideoFrame decoded_image;
+  decoded_image.set_video_frame_buffer(img_wrapped_buffer);
+  decoded_image.set_timestamp(timestamp);
   int ret = decode_complete_callback_->Decoded(decoded_image);
   if (ret != 0)
     return ret;


@@ -288,17 +288,9 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
       !_encoder->SupportsNativeHandle()) {
     // This module only supports software encoding.
     // TODO(pbos): Offload conversion from the encoder thread.
-    rtc::scoped_refptr<VideoFrameBuffer> converted_buffer(
-        converted_frame.video_frame_buffer()->NativeToI420Buffer());
-    if (!converted_buffer) {
-      LOG(LS_ERROR) << "Frame conversion failed, dropping frame.";
-      return VCM_PARAMETER_ERROR;
-    }
-    converted_frame = VideoFrame(converted_buffer,
-                                 converted_frame.timestamp(),
-                                 converted_frame.render_time_ms(),
-                                 converted_frame.rotation());
+    converted_frame = converted_frame.ConvertNativeToI420Frame();
+    RTC_CHECK(!converted_frame.IsZeroSize())
+        << "Frame conversion failed, won't be able to encode frame.";
   }
   int32_t ret =
       _encoder->Encode(converted_frame, codecSpecificInfo, next_frame_types);


@@ -96,22 +96,19 @@ const VideoFrame* VPMFramePreprocessor::PreprocessFrame(
   const VideoFrame* current_frame = &frame;
   if (denoiser_) {
-    rtc::scoped_refptr<I420Buffer>* denoised_buffer = &denoised_buffer_[0];
-    rtc::scoped_refptr<I420Buffer>* denoised_buffer_prev = &denoised_buffer_[1];
+    rtc::scoped_refptr<I420Buffer>* denoised_frame = &denoised_buffer_[0];
+    rtc::scoped_refptr<I420Buffer>* denoised_frame_prev = &denoised_buffer_[1];
     // Swap the buffer to save one memcpy in DenoiseFrame.
     if (denoised_frame_toggle_) {
-      denoised_buffer = &denoised_buffer_[1];
-      denoised_buffer_prev = &denoised_buffer_[0];
+      denoised_frame = &denoised_buffer_[1];
+      denoised_frame_prev = &denoised_buffer_[0];
     }
     // Invert the flag.
     denoised_frame_toggle_ ^= 1;
-    denoiser_->DenoiseFrame(current_frame->video_frame_buffer(),
-                            denoised_buffer,
-                            denoised_buffer_prev, true);
-    denoised_frame_ = VideoFrame(*denoised_buffer,
-                                 current_frame->timestamp(),
-                                 current_frame->render_time_ms(),
-                                 current_frame->rotation());
+    denoiser_->DenoiseFrame(current_frame->video_frame_buffer(), denoised_frame,
+                            denoised_frame_prev, true);
+    denoised_frame_.ShallowCopy(*current_frame);
+    denoised_frame_.set_video_frame_buffer(*denoised_frame);
     current_frame = &denoised_frame_;
   }
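
The toggle above implements double buffering (background, hedged): the denoiser keeps two persistent buffers and alternates their current/previous roles each frame, so the previous output stays available as a reference without a copy. The pattern in miniature, with hypothetical stand-ins for denoised_buffer_[2] and denoised_frame_toggle_:

  #include <array>
  #include <cstdint>
  #include <vector>

  std::array<std::vector<uint8_t>, 2> buffers;  // persistent ping-pong pair
  int toggle = 0;

  void ProcessFrame(const std::vector<uint8_t>& input) {
    std::vector<uint8_t>& current = buffers[toggle];       // this frame's output
    std::vector<uint8_t>& previous = buffers[toggle ^ 1];  // last frame's output
    // A real denoiser would filter |input| into |current| using |previous|
    // as the temporal reference; copying it here stands in for that step.
    current = input;
    (void)previous;
    toggle ^= 1;  // swap roles for the next frame
  }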


@@ -58,10 +58,10 @@ int32_t VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
   scaled_buffer->CropAndScaleFrom(inFrame.video_frame_buffer());

-  *outFrame = VideoFrame(scaled_buffer,
-                         inFrame.timestamp(),
-                         inFrame.render_time_ms(),
-                         inFrame.rotation());
+  outFrame->set_video_frame_buffer(scaled_buffer);
+
+  // Setting time parameters to the output frame.
+  outFrame->set_timestamp(inFrame.timestamp());
+  outFrame->set_render_time_ms(inFrame.render_time_ms());

   return VPM_OK;
 }


@@ -40,42 +40,42 @@
   if (!self.i420Buffer) {
     return nullptr;
   }
-  return self.i420Buffer->DataY();
+  return self.i420Buffer->data(webrtc::kYPlane);
 }

 - (const uint8_t *)uPlane {
   if (!self.i420Buffer) {
     return nullptr;
   }
-  return self.i420Buffer->DataU();
+  return self.i420Buffer->data(webrtc::kUPlane);
 }

 - (const uint8_t *)vPlane {
   if (!self.i420Buffer) {
     return nullptr;
   }
-  return self.i420Buffer->DataV();
+  return self.i420Buffer->data(webrtc::kVPlane);
 }

 - (int32_t)yPitch {
   if (!self.i420Buffer) {
     return 0;
   }
-  return self.i420Buffer->StrideY();
+  return self.i420Buffer->stride(webrtc::kYPlane);
 }

 - (int32_t)uPitch {
   if (!self.i420Buffer) {
     return 0;
   }
-  return self.i420Buffer->StrideU();
+  return self.i420Buffer->stride(webrtc::kUPlane);
 }

 - (int32_t)vPitch {
   if (!self.i420Buffer) {
     return 0;
   }
-  return self.i420Buffer->StrideV();
+  return self.i420Buffer->stride(webrtc::kVPlane);
 }

 - (int64_t)timeStamp {


@@ -121,6 +121,14 @@ class VideoFrame {
   const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer()
       const;

+  // Set the underlying buffer.
+  void set_video_frame_buffer(
+      const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer);
+
+  // Convert native-handle frame to memory-backed I420 frame. Should not be
+  // called on a non-native-handle frame.
+  VideoFrame ConvertNativeToI420Frame() const;
+
   // Return true if the frame is stored in a texture.
   bool is_texture() {
     return video_frame_buffer() &&