Partial reland of Delete unused and almost unused frame-related methods. (patchset #1 id:1 of https://codereview.webrtc.org/2076113002/ )

Reason for revert:
Taking out the VideoFrameBuffer changes which broke downstream.

Original issue's description:
> Revert of Delete unused and almost unused frame-related methods. (patchset #12 id:220001 of https://codereview.webrtc.org/2065733003/ )
>
> Reason for revert:
> Breaks downstream applications which inherit webrtc::VideoFrameBuffer and try to override deleted methods data(), stride() and MutableData().
>
> Original issue's description:
> > Delete unused and almost unused frame-related methods.
> >
> > webrtc::VideoFrame::set_video_frame_buffer
> > webrtc::VideoFrame::ConvertNativeToI420Frame
> >
> > cricket::WebRtcVideoFrame::InitToBlack
> >
> > VideoFrameBuffer::data
> > VideoFrameBuffer::stride
> > VideoFrameBuffer::MutableData
> >
> > TBR=tkchin@webrtc.org # Refactoring affecting RTCVideoFrame
> > BUG=webrtc:5682
> >
> > Committed: https://crrev.com/76270de4bc2dac188f10f805e6e2fb86693ef864
> > Cr-Commit-Position: refs/heads/master@{#13183}
>
> TBR=perkj@webrtc.org,pbos@webrtc.org,marpan@webrtc.org,tkchin@webrtc.org
> # Skipping CQ checks because original CL landed less than 1 day ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=webrtc:5682
>
> Committed: https://crrev.com/72e735d3867a0fd6ab7e4d0761c7ba5f6c068617
> Cr-Commit-Position: refs/heads/master@{#13184}

TBR=perkj@webrtc.org,pbos@webrtc.org,marpan@webrtc.org,tkchin@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5682

Review-Url: https://codereview.webrtc.org/2076123002
Cr-Commit-Position: refs/heads/master@{#13189}
This commit is contained in:
nisse
2016-06-17 05:03:04 -07:00
committed by Commit bot
parent fd634c43e9
commit ca6d5d1c9f
14 changed files with 86 additions and 99 deletions

View File

@ -794,12 +794,12 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
libyuv::I420Copy(y_ptr, stride,
u_ptr, uv_stride,
v_ptr, uv_stride,
frame_buffer->MutableData(webrtc::kYPlane),
frame_buffer->stride(webrtc::kYPlane),
frame_buffer->MutableData(webrtc::kUPlane),
frame_buffer->stride(webrtc::kUPlane),
frame_buffer->MutableData(webrtc::kVPlane),
frame_buffer->stride(webrtc::kVPlane),
frame_buffer->MutableDataY(),
frame_buffer->StrideY(),
frame_buffer->MutableDataU(),
frame_buffer->StrideU(),
frame_buffer->MutableDataV(),
frame_buffer->StrideV(),
width, height);
} else {
// All other supported formats are nv12.
@ -808,12 +808,12 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
libyuv::NV12ToI420(
y_ptr, stride,
uv_ptr, stride,
frame_buffer->MutableData(webrtc::kYPlane),
frame_buffer->stride(webrtc::kYPlane),
frame_buffer->MutableData(webrtc::kUPlane),
frame_buffer->stride(webrtc::kUPlane),
frame_buffer->MutableData(webrtc::kVPlane),
frame_buffer->stride(webrtc::kVPlane),
frame_buffer->MutableDataY(),
frame_buffer->StrideY(),
frame_buffer->MutableDataU(),
frame_buffer->StrideU(),
frame_buffer->MutableDataV(),
frame_buffer->StrideV(),
width, height);
}
// Return output byte buffer back to codec.

View File

@ -670,7 +670,8 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
}
consecutive_full_queue_frame_drops_ = 0;
VideoFrame input_frame = frame;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
frame.video_frame_buffer());
if (scale_) {
// Check framerate before spatial resolution change.
quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
@ -678,21 +679,22 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
quality_scaler_.GetScaledResolution();
if (scaled_resolution.width != frame.width() ||
scaled_resolution.height != frame.height()) {
if (frame.video_frame_buffer()->native_handle() != nullptr) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
static_cast<AndroidTextureBuffer*>(
frame.video_frame_buffer().get())->CropScaleAndRotate(
frame.width(), frame.height(), 0, 0,
scaled_resolution.width, scaled_resolution.height,
webrtc::kVideoRotation_0));
input_frame.set_video_frame_buffer(scaled_buffer);
if (input_buffer->native_handle() != nullptr) {
input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
->CropScaleAndRotate(frame.width(), frame.height(),
0, 0,
scaled_resolution.width,
scaled_resolution.height,
webrtc::kVideoRotation_0);
} else {
input_frame.set_video_frame_buffer(
quality_scaler_.GetScaledBuffer(frame.video_frame_buffer()));
input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
}
}
}
VideoFrame input_frame(input_buffer, frame.timestamp(),
frame.render_time_ms(), frame.rotation());
if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
ALOGE << "Failed to reconfigure encoder.";
return WEBRTC_VIDEO_CODEC_ERROR;

View File

@ -215,10 +215,10 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
libyuv::NV12ToI420Rotate(
y_plane + width * crop_y + crop_x, width,
uv_plane + uv_width * crop_y + crop_x, width,
buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
buffer->MutableDataY(), buffer->StrideY(),
// Swap U and V, since we have NV21, not NV12.
buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
buffer->MutableDataV(), buffer->StrideV(),
buffer->MutableDataU(), buffer->StrideU(),
crop_width, crop_height, static_cast<libyuv::RotationMode>(
capturer_->apply_rotation() ? rotation : 0));

View File

@ -46,9 +46,9 @@ CoreVideoFrameBuffer::NativeToI420Buffer() {
int src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 1);
int ret = libyuv::NV12ToI420(
src_y, src_y_stride, src_uv, src_uv_stride,
buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
buffer->MutableDataY(), buffer->StrideY(),
buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(),
width, height);
CVPixelBufferUnlockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
if (ret) {

View File

@ -155,20 +155,6 @@ const rtc::scoped_refptr<VideoFrameBuffer>& VideoFrame::video_frame_buffer()
return video_frame_buffer_;
}
void VideoFrame::set_video_frame_buffer(
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
RTC_DCHECK(buffer);
video_frame_buffer_ = buffer;
}
VideoFrame VideoFrame::ConvertNativeToI420Frame() const {
RTC_DCHECK(video_frame_buffer_->native_handle());
VideoFrame frame;
frame.ShallowCopy(*this);
frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer());
return frame;
}
size_t EncodedImage::GetBufferPaddingBytes(VideoCodecType codec_type) {
switch (codec_type) {
case kVideoCodecVP8:

View File

@ -65,20 +65,6 @@ bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh,
frame->rotation, apply_rotation);
}
// TODO(nisse): Deprecated, delete as soon as Chrome is updated.
bool WebRtcVideoFrame::InitToBlack(int w, int h,
int64_t time_stamp_ns) {
rtc::scoped_refptr<webrtc::I420Buffer> buffer(
new rtc::RefCountedObject<webrtc::I420Buffer>(w, h));
buffer->SetToBlack();
video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
SetTimeStamp(time_stamp_ns);
rotation_ = webrtc::kVideoRotation_0;
return true;
}
int WebRtcVideoFrame::width() const {
return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
}

View File

@ -63,9 +63,6 @@ class WebRtcVideoFrame : public VideoFrame {
void InitToEmptyBuffer(int w, int h);
void InitToEmptyBuffer(int w, int h, int64_t time_stamp_ns);
// TODO(nisse): Deprecated, delete as soon as Chrome is updated.
bool InitToBlack(int w, int h, int64_t time_stamp_ns);
int width() const override;
int height() const override;

View File

@ -123,11 +123,16 @@ int H264DecoderImpl::AVGetBuffer2(
// The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
// of a video frame and will be set up to reference |video_frame|'s buffers.
VideoFrame* video_frame = new VideoFrame();
// TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
// Refactor to do not use a VideoFrame object at all.
// FFmpeg expects the initial allocation to be zero-initialized according to
// http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
video_frame->set_video_frame_buffer(
decoder->pool_.CreateBuffer(width, height));
VideoFrame* video_frame = new VideoFrame(
decoder->pool_.CreateBuffer(width, height),
0 /* timestamp */, 0 /* render_time_ms */, kVideoRotation_0);
// DCHECK that we have a continuous buffer as is required.
RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
video_frame->video_frame_buffer()->DataY() +
@ -355,22 +360,30 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
video_frame->video_frame_buffer()->DataV());
video_frame->set_timestamp(input_image._timeStamp);
int32_t ret;
// The decoded image may be larger than what is supposed to be visible, see
// |AVGetBuffer2|'s use of |avcodec_align_dimensions|. This crops the image
// without copying the underlying buffer.
rtc::scoped_refptr<VideoFrameBuffer> buf = video_frame->video_frame_buffer();
if (av_frame_->width != buf->width() || av_frame_->height != buf->height()) {
video_frame->set_video_frame_buffer(
rtc::scoped_refptr<VideoFrameBuffer> cropped_buf(
new rtc::RefCountedObject<WrappedI420Buffer>(
av_frame_->width, av_frame_->height,
buf->DataY(), buf->StrideY(),
buf->DataU(), buf->StrideU(),
buf->DataV(), buf->StrideV(),
rtc::KeepRefUntilDone(buf)));
}
VideoFrame cropped_frame(
cropped_buf, video_frame->timestamp(), video_frame->render_time_ms(),
video_frame->rotation());
// TODO(nisse): Timestamp and rotation are all zero here. Change decoder
// interface to pass a VideoFrameBuffer instead of a VideoFrame?
ret = decoded_image_callback_->Decoded(cropped_frame);
} else {
// Return decoded frame.
int32_t ret = decoded_image_callback_->Decoded(*video_frame);
ret = decoded_image_callback_->Decoded(*video_frame);
}
// Stop referencing it, possibly freeing |video_frame|.
av_frame_unref(av_frame_.get());
video_frame = nullptr;

View File

@ -960,9 +960,9 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
// release |img_buffer|.
rtc::KeepRefUntilDone(img_buffer)));
VideoFrame decoded_image;
decoded_image.set_video_frame_buffer(img_wrapped_buffer);
decoded_image.set_timestamp(timestamp);
VideoFrame decoded_image(img_wrapped_buffer, timestamp,
0 /* render_time_ms */, webrtc::kVideoRotation_0);
int ret = decode_complete_callback_->Decoded(decoded_image);
if (ret != 0)
return ret;

View File

@ -288,9 +288,17 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
!_encoder->SupportsNativeHandle()) {
// This module only supports software encoding.
// TODO(pbos): Offload conversion from the encoder thread.
converted_frame = converted_frame.ConvertNativeToI420Frame();
RTC_CHECK(!converted_frame.IsZeroSize())
<< "Frame conversion failed, won't be able to encode frame.";
rtc::scoped_refptr<VideoFrameBuffer> converted_buffer(
converted_frame.video_frame_buffer()->NativeToI420Buffer());
if (!converted_buffer) {
LOG(LS_ERROR) << "Frame conversion failed, dropping frame.";
return VCM_PARAMETER_ERROR;
}
converted_frame = VideoFrame(converted_buffer,
converted_frame.timestamp(),
converted_frame.render_time_ms(),
converted_frame.rotation());
}
int32_t ret =
_encoder->Encode(converted_frame, codecSpecificInfo, next_frame_types);

View File

@ -96,19 +96,22 @@ const VideoFrame* VPMFramePreprocessor::PreprocessFrame(
const VideoFrame* current_frame = &frame;
if (denoiser_) {
rtc::scoped_refptr<I420Buffer>* denoised_frame = &denoised_buffer_[0];
rtc::scoped_refptr<I420Buffer>* denoised_frame_prev = &denoised_buffer_[1];
rtc::scoped_refptr<I420Buffer>* denoised_buffer = &denoised_buffer_[0];
rtc::scoped_refptr<I420Buffer>* denoised_buffer_prev = &denoised_buffer_[1];
// Swap the buffer to save one memcpy in DenoiseFrame.
if (denoised_frame_toggle_) {
denoised_frame = &denoised_buffer_[1];
denoised_frame_prev = &denoised_buffer_[0];
denoised_buffer = &denoised_buffer_[1];
denoised_buffer_prev = &denoised_buffer_[0];
}
// Invert the flag.
denoised_frame_toggle_ ^= 1;
denoiser_->DenoiseFrame(current_frame->video_frame_buffer(), denoised_frame,
denoised_frame_prev, true);
denoised_frame_.ShallowCopy(*current_frame);
denoised_frame_.set_video_frame_buffer(*denoised_frame);
denoiser_->DenoiseFrame(current_frame->video_frame_buffer(),
denoised_buffer,
denoised_buffer_prev, true);
denoised_frame_ = VideoFrame(*denoised_buffer,
current_frame->timestamp(),
current_frame->render_time_ms(),
current_frame->rotation());
current_frame = &denoised_frame_;
}

View File

@ -58,10 +58,10 @@ int32_t VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
scaled_buffer->CropAndScaleFrom(inFrame.video_frame_buffer());
outFrame->set_video_frame_buffer(scaled_buffer);
// Setting time parameters to the output frame.
outFrame->set_timestamp(inFrame.timestamp());
outFrame->set_render_time_ms(inFrame.render_time_ms());
*outFrame = VideoFrame(scaled_buffer,
inFrame.timestamp(),
inFrame.render_time_ms(),
inFrame.rotation());
return VPM_OK;
}

View File

@ -40,42 +40,42 @@
if (!self.i420Buffer) {
return nullptr;
}
return self.i420Buffer->data(webrtc::kYPlane);
return self.i420Buffer->DataY();
}
- (const uint8_t *)uPlane {
if (!self.i420Buffer) {
return nullptr;
}
return self.i420Buffer->data(webrtc::kUPlane);
return self.i420Buffer->DataU();
}
- (const uint8_t *)vPlane {
if (!self.i420Buffer) {
return nullptr;
}
return self.i420Buffer->data(webrtc::kVPlane);
return self.i420Buffer->DataV();
}
- (int32_t)yPitch {
if (!self.i420Buffer) {
return 0;
}
return self.i420Buffer->stride(webrtc::kYPlane);
return self.i420Buffer->StrideY();
}
- (int32_t)uPitch {
if (!self.i420Buffer) {
return 0;
}
return self.i420Buffer->stride(webrtc::kUPlane);
return self.i420Buffer->StrideU();
}
- (int32_t)vPitch {
if (!self.i420Buffer) {
return 0;
}
return self.i420Buffer->stride(webrtc::kVPlane);
return self.i420Buffer->StrideV();
}
- (int64_t)timeStamp {

View File

@ -121,14 +121,6 @@ class VideoFrame {
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer()
const;
// Set the underlying buffer.
void set_video_frame_buffer(
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer);
// Convert native-handle frame to memory-backed I420 frame. Should not be
// called on a non-native-handle frame.
VideoFrame ConvertNativeToI420Frame() const;
// Return true if the frame is stored in a texture.
bool is_texture() {
return video_frame_buffer() &&