Delete unused and almost unused frame-related methods.

webrtc::VideoFrame::set_video_frame_buffer
webrtc::VideoFrame::ConvertNativeToI420Frame

cricket::WebRtcVideoFrame::InitToBlack

VideoFrameBuffer::data
VideoFrameBuffer::stride
VideoFrameBuffer::MutableData

TBR=tkchin@webrtc.org # Refactoring affecting RTCVideoFrame
BUG=webrtc:5682

Review-Url: https://codereview.webrtc.org/2065733003
Cr-Commit-Position: refs/heads/master@{#13183}
This commit is contained in:
nisse
2016-06-17 02:00:12 -07:00
committed by Commit bot
parent 0dbc8bfbe5
commit 76270de4bc
16 changed files with 92 additions and 174 deletions

View File

@ -123,11 +123,16 @@ int H264DecoderImpl::AVGetBuffer2(
// The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
// of a video frame and will be set up to reference |video_frame|'s buffers.
VideoFrame* video_frame = new VideoFrame();
// TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
// Refactor so that a VideoFrame object is not used at all.
// FFmpeg expects the initial allocation to be zero-initialized according to
// http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
video_frame->set_video_frame_buffer(
decoder->pool_.CreateBuffer(width, height));
VideoFrame* video_frame = new VideoFrame(
decoder->pool_.CreateBuffer(width, height),
0 /* timestamp */, 0 /* render_time_ms */, kVideoRotation_0);
// DCHECK that we have a continuous buffer as is required.
RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
video_frame->video_frame_buffer()->DataY() +
@ -355,22 +360,30 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
video_frame->video_frame_buffer()->DataV());
video_frame->set_timestamp(input_image._timeStamp);
int32_t ret;
// The decoded image may be larger than what is supposed to be visible, see
// |AVGetBuffer2|'s use of |avcodec_align_dimensions|. This crops the image
// without copying the underlying buffer.
rtc::scoped_refptr<VideoFrameBuffer> buf = video_frame->video_frame_buffer();
if (av_frame_->width != buf->width() || av_frame_->height != buf->height()) {
video_frame->set_video_frame_buffer(
rtc::scoped_refptr<VideoFrameBuffer> cropped_buf(
new rtc::RefCountedObject<WrappedI420Buffer>(
av_frame_->width, av_frame_->height,
buf->DataY(), buf->StrideY(),
buf->DataU(), buf->StrideU(),
buf->DataV(), buf->StrideV(),
rtc::KeepRefUntilDone(buf)));
VideoFrame cropped_frame(
cropped_buf, video_frame->timestamp(), video_frame->render_time_ms(),
video_frame->rotation());
// TODO(nisse): Timestamp and rotation are both zero here. Change decoder
// interface to pass a VideoFrameBuffer instead of a VideoFrame?
ret = decoded_image_callback_->Decoded(cropped_frame);
} else {
// Return decoded frame.
ret = decoded_image_callback_->Decoded(*video_frame);
}
// Return decoded frame.
int32_t ret = decoded_image_callback_->Decoded(*video_frame);
// Stop referencing it, possibly freeing |video_frame|.
av_frame_unref(av_frame_.get());
video_frame = nullptr;

View File

@ -960,9 +960,9 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
// release |img_buffer|.
rtc::KeepRefUntilDone(img_buffer)));
VideoFrame decoded_image;
decoded_image.set_video_frame_buffer(img_wrapped_buffer);
decoded_image.set_timestamp(timestamp);
VideoFrame decoded_image(img_wrapped_buffer, timestamp,
0 /* render_time_ms */, webrtc::kVideoRotation_0);
int ret = decode_complete_callback_->Decoded(decoded_image);
if (ret != 0)
return ret;