From 72e735d3867a0fd6ab7e4d0761c7ba5f6c068617 Mon Sep 17 00:00:00 2001
From: nisse
Date: Fri, 17 Jun 2016 02:55:14 -0700
Subject: [PATCH] Revert of Delete unused and almost unused frame-related
 methods. (patchset #12 id:220001 of
 https://codereview.webrtc.org/2065733003/ )

Reason for revert:
Breaks downstream applications which inherit webrtc::VideoFrameBuffer and
try to override the deleted methods data(), stride() and MutableData().

Original issue's description:
> Delete unused and almost unused frame-related methods.
>
> webrtc::VideoFrame::set_video_frame_buffer
> webrtc::VideoFrame::ConvertNativeToI420Frame
>
> cricket::WebRtcVideoFrame::InitToBlack
>
> VideoFrameBuffer::data
> VideoFrameBuffer::stride
> VideoFrameBuffer::MutableData
>
> TBR=tkchin@webrtc.org # Refactoring affecting RTCVideoFrame
> BUG=webrtc:5682
>
> Committed: https://crrev.com/76270de4bc2dac188f10f805e6e2fb86693ef864
> Cr-Commit-Position: refs/heads/master@{#13183}

TBR=perkj@webrtc.org,pbos@webrtc.org,marpan@webrtc.org,tkchin@webrtc.org
# Skipping CQ checks because original CL landed less than 1 days ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5682

Review-Url: https://codereview.webrtc.org/2076113002
Cr-Commit-Position: refs/heads/master@{#13184}
---
 .../api/java/jni/androidmediadecoder_jni.cc   | 24 +++----
 .../api/java/jni/androidmediaencoder_jni.cc   | 24 ++---
 .../api/java/jni/androidvideocapturer_jni.cc  |  6 +-
 webrtc/common_video/corevideo_frame_buffer.cc |  6 +-
 .../common_video/include/video_frame_buffer.h | 19 ++++--
 webrtc/common_video/video_frame.cc            | 14 +++++
 webrtc/common_video/video_frame_buffer.cc     | 62 +++++++++++++++++++
 webrtc/media/engine/webrtcvideoframe.cc       | 14 +++++
 webrtc/media/engine/webrtcvideoframe.h        |  3 +
 .../codecs/h264/h264_decoder_impl.cc          | 27 +++-----
 .../video_coding/codecs/vp9/vp9_impl.cc       |  6 +-
 webrtc/modules/video_coding/video_sender.cc   | 14 +----
 .../video_processing/frame_preprocessor.cc    | 19 +++---
 .../video_processing/spatial_resampler.cc     |  8 +--
 .../objc/Framework/Classes/RTCVideoFrame.mm   | 12 ++--
 webrtc/video_frame.h                          |  8 +++
 16 files changed, 174 insertions(+), 92 deletions(-)
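Note on the breakage this revert addresses (illustrative only; this note
sits in the patch-notes area that git am/apply ignore): a downstream
subclass of webrtc::VideoFrameBuffer that still overrides the per-PlaneType
accessors looks roughly like the sketch below. The class and member names
(ExternalFrameBuffer, planes_, strides_) are hypothetical and not taken
from any real downstream project. Against the original CL the three
'override' specifiers fail to compile because the base-class methods were
deleted; with this revert they are virtual again, and the restored default
implementations forward each accessor style to the other, so a subclass may
override either the old data()/stride()/MutableData() set or the newer
DataY()/StrideY()-style set (it must override at least one of the two, or
the defaults recurse into each other).

  #include "webrtc/common_video/include/video_frame_buffer.h"

  // Hypothetical downstream buffer wrapping externally owned I420 memory.
  class ExternalFrameBuffer : public webrtc::VideoFrameBuffer {
   public:
    ExternalFrameBuffer(int width, int height,
                        uint8_t* planes[3], const int strides[3])
        : width_(width), height_(height) {
      for (int i = 0; i < 3; ++i) {  // Y, U, V planes.
        planes_[i] = planes[i];
        strides_[i] = strides[i];
      }
    }

    int width() const override { return width_; }
    int height() const override { return height_; }

    // Old-style accessors; these overrides are what the reverted CL broke.
    const uint8_t* data(webrtc::PlaneType type) const override {
      return planes_[type];
    }
    int stride(webrtc::PlaneType type) const override {
      return strides_[type];
    }
    uint8_t* MutableData(webrtc::PlaneType type) override {
      return planes_[type];
    }

    // Memory-backed buffer: no native handle, already in I420 layout.
    void* native_handle() const override { return nullptr; }
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override {
      return this;
    }

   private:
    const int width_;
    const int height_;
    uint8_t* planes_[3];
    int strides_[3];
  };

Such a buffer would be handed out as
rtc::scoped_refptr<webrtc::VideoFrameBuffer>(
    new rtc::RefCountedObject<ExternalFrameBuffer>(w, h, planes, strides)).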
diff --git a/webrtc/api/java/jni/androidmediadecoder_jni.cc b/webrtc/api/java/jni/androidmediadecoder_jni.cc
index 2ec222f81f..3793756ec7 100644
--- a/webrtc/api/java/jni/androidmediadecoder_jni.cc
+++ b/webrtc/api/java/jni/androidmediadecoder_jni.cc
@@ -794,12 +794,12 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
       libyuv::I420Copy(y_ptr, stride,
                        u_ptr, uv_stride,
                        v_ptr, uv_stride,
-                       frame_buffer->MutableDataY(),
-                       frame_buffer->StrideY(),
-                       frame_buffer->MutableDataU(),
-                       frame_buffer->StrideU(),
-                       frame_buffer->MutableDataV(),
-                       frame_buffer->StrideV(),
+                       frame_buffer->MutableData(webrtc::kYPlane),
+                       frame_buffer->stride(webrtc::kYPlane),
+                       frame_buffer->MutableData(webrtc::kUPlane),
+                       frame_buffer->stride(webrtc::kUPlane),
+                       frame_buffer->MutableData(webrtc::kVPlane),
+                       frame_buffer->stride(webrtc::kVPlane),
                        width, height);
     } else {
       // All other supported formats are nv12.
@@ -808,12 +808,12 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
       libyuv::NV12ToI420(
           y_ptr, stride,
           uv_ptr, stride,
-          frame_buffer->MutableDataY(),
-          frame_buffer->StrideY(),
-          frame_buffer->MutableDataU(),
-          frame_buffer->StrideU(),
-          frame_buffer->MutableDataV(),
-          frame_buffer->StrideV(),
+          frame_buffer->MutableData(webrtc::kYPlane),
+          frame_buffer->stride(webrtc::kYPlane),
+          frame_buffer->MutableData(webrtc::kUPlane),
+          frame_buffer->stride(webrtc::kUPlane),
+          frame_buffer->MutableData(webrtc::kVPlane),
+          frame_buffer->stride(webrtc::kVPlane),
           width, height);
     }
     // Return output byte buffer back to codec.
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.cc b/webrtc/api/java/jni/androidmediaencoder_jni.cc
index 20dc1509e0..ce1ebc17c0 100644
--- a/webrtc/api/java/jni/androidmediaencoder_jni.cc
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.cc
@@ -670,8 +670,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
   }
   consecutive_full_queue_frame_drops_ = 0;
 
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
-      frame.video_frame_buffer());
+  VideoFrame input_frame = frame;
   if (scale_) {
     // Check framerate before spatial resolution change.
     quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
@@ -679,22 +678,21 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
         quality_scaler_.GetScaledResolution();
     if (scaled_resolution.width != frame.width() ||
         scaled_resolution.height != frame.height()) {
-      if (input_buffer->native_handle() != nullptr) {
-        input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
-                           ->CropScaleAndRotate(frame.width(), frame.height(),
-                                                0, 0,
-                                                scaled_resolution.width,
-                                                scaled_resolution.height,
-                                                webrtc::kVideoRotation_0);
+      if (frame.video_frame_buffer()->native_handle() != nullptr) {
+        rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+            static_cast<AndroidTextureBuffer*>(
+                frame.video_frame_buffer().get())->CropScaleAndRotate(
+                    frame.width(), frame.height(), 0, 0,
+                    scaled_resolution.width, scaled_resolution.height,
+                    webrtc::kVideoRotation_0));
+        input_frame.set_video_frame_buffer(scaled_buffer);
       } else {
-        input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
+        input_frame.set_video_frame_buffer(
+            quality_scaler_.GetScaledBuffer(frame.video_frame_buffer()));
       }
     }
   }
 
-  VideoFrame input_frame(input_buffer, frame.timestamp(),
-                         frame.render_time_ms(), frame.rotation());
-
   if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
     ALOGE << "Failed to reconfigure encoder.";
     return WEBRTC_VIDEO_CODEC_ERROR;
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.cc b/webrtc/api/java/jni/androidvideocapturer_jni.cc
index 15cd2d4469..4f3d64b97a 100644
--- a/webrtc/api/java/jni/androidvideocapturer_jni.cc
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.cc
@@ -215,10 +215,10 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
   libyuv::NV12ToI420Rotate(
       y_plane + width * crop_y + crop_x, width,
       uv_plane + uv_width * crop_y + crop_x, width,
-      buffer->MutableDataY(), buffer->StrideY(),
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
       // Swap U and V, since we have NV21, not NV12.
-      buffer->MutableDataV(), buffer->StrideV(),
-      buffer->MutableDataU(), buffer->StrideU(),
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
       crop_width, crop_height,
       static_cast<libyuv::RotationMode>(
          capturer_->apply_rotation() ? rotation : 0));
diff --git a/webrtc/common_video/corevideo_frame_buffer.cc b/webrtc/common_video/corevideo_frame_buffer.cc
index a58ddc7fbb..55dc00da85 100644
--- a/webrtc/common_video/corevideo_frame_buffer.cc
+++ b/webrtc/common_video/corevideo_frame_buffer.cc
@@ -46,9 +46,9 @@ CoreVideoFrameBuffer::NativeToI420Buffer() {
   int src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 1);
   int ret = libyuv::NV12ToI420(
       src_y, src_y_stride, src_uv, src_uv_stride,
-      buffer->MutableDataY(), buffer->StrideY(),
-      buffer->MutableDataU(), buffer->StrideU(),
-      buffer->MutableDataV(), buffer->StrideV(),
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
       width, height);
   CVPixelBufferUnlockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
   if (ret) {
diff --git a/webrtc/common_video/include/video_frame_buffer.h b/webrtc/common_video/include/video_frame_buffer.h
index 328c0ca586..dfffb9a0d6 100644
--- a/webrtc/common_video/include/video_frame_buffer.h
+++ b/webrtc/common_video/include/video_frame_buffer.h
@@ -48,20 +48,27 @@ class VideoFrameBuffer : public rtc::RefCountInterface {
   // Returns pointer to the pixel data for a given plane. The memory is owned by
   // the VideoFrameBuffer object and must not be freed by the caller.
-  virtual const uint8_t* DataY() const = 0;
-  virtual const uint8_t* DataU() const = 0;
-  virtual const uint8_t* DataV() const = 0;
+  virtual const uint8_t* DataY() const;
+  virtual const uint8_t* DataU() const;
+  virtual const uint8_t* DataV() const;
+  // Deprecated method.
+  // TODO(nisse): Delete after all users are updated.
+  virtual const uint8_t* data(PlaneType type) const;
 
   // TODO(nisse): Move MutableData methods to the I420Buffer subclass.
   // Non-const data access.
   virtual uint8_t* MutableDataY();
   virtual uint8_t* MutableDataU();
   virtual uint8_t* MutableDataV();
+  // Deprecated method. TODO(nisse): Delete after all users are updated.
+  virtual uint8_t* MutableData(PlaneType type);
 
   // Returns the number of bytes between successive rows for a given plane.
-  virtual int StrideY() const = 0;
-  virtual int StrideU() const = 0;
-  virtual int StrideV() const = 0;
+  virtual int StrideY() const;
+  virtual int StrideU() const;
+  virtual int StrideV() const;
+  // Deprecated method. TODO(nisse): Delete after all users are updated.
+  virtual int stride(PlaneType type) const;
 
   // Return the handle of the underlying video frame. This is used when the
   // frame is backed by a texture.
diff --git a/webrtc/common_video/video_frame.cc b/webrtc/common_video/video_frame.cc
index 463e8ed442..bfac3a680d 100644
--- a/webrtc/common_video/video_frame.cc
+++ b/webrtc/common_video/video_frame.cc
@@ -155,6 +155,20 @@ const rtc::scoped_refptr<VideoFrameBuffer>& VideoFrame::video_frame_buffer()
   return video_frame_buffer_;
 }
 
+void VideoFrame::set_video_frame_buffer(
+    const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
+  RTC_DCHECK(buffer);
+  video_frame_buffer_ = buffer;
+}
+
+VideoFrame VideoFrame::ConvertNativeToI420Frame() const {
+  RTC_DCHECK(video_frame_buffer_->native_handle());
+  VideoFrame frame;
+  frame.ShallowCopy(*this);
+  frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer());
+  return frame;
+}
+
 size_t EncodedImage::GetBufferPaddingBytes(VideoCodecType codec_type) {
   switch (codec_type) {
     case kVideoCodecVP8:
diff --git a/webrtc/common_video/video_frame_buffer.cc b/webrtc/common_video/video_frame_buffer.cc
index 60ecae381c..14e19dc0e3 100644
--- a/webrtc/common_video/video_frame_buffer.cc
+++ b/webrtc/common_video/video_frame_buffer.cc
@@ -30,6 +30,54 @@ int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
 
 }  // namespace
 
+const uint8_t* VideoFrameBuffer::data(PlaneType type) const {
+  switch (type) {
+    case kYPlane:
+      return DataY();
+    case kUPlane:
+      return DataU();
+    case kVPlane:
+      return DataV();
+    default:
+      RTC_NOTREACHED();
+      return nullptr;
+  }
+}
+
+const uint8_t* VideoFrameBuffer::DataY() const {
+  return data(kYPlane);
+}
+const uint8_t* VideoFrameBuffer::DataU() const {
+  return data(kUPlane);
+}
+const uint8_t* VideoFrameBuffer::DataV() const {
+  return data(kVPlane);
+}
+
+int VideoFrameBuffer::stride(PlaneType type) const {
+  switch (type) {
+    case kYPlane:
+      return StrideY();
+    case kUPlane:
+      return StrideU();
+    case kVPlane:
+      return StrideV();
+    default:
+      RTC_NOTREACHED();
+      return 0;
+  }
+}
+
+int VideoFrameBuffer::StrideY() const {
+  return stride(kYPlane);
+}
+int VideoFrameBuffer::StrideU() const {
+  return stride(kUPlane);
+}
+int VideoFrameBuffer::StrideV() const {
+  return stride(kVPlane);
+}
+
 uint8_t* VideoFrameBuffer::MutableDataY() {
   RTC_NOTREACHED();
   return nullptr;
@@ -43,6 +91,20 @@ uint8_t* VideoFrameBuffer::MutableDataV() {
   RTC_NOTREACHED();
   return nullptr;
 }
 
+uint8_t* VideoFrameBuffer::MutableData(PlaneType type) {
+  switch (type) {
+    case kYPlane:
+      return MutableDataY();
+    case kUPlane:
+      return MutableDataU();
+    case kVPlane:
+      return MutableDataV();
+    default:
+      RTC_NOTREACHED();
+      return nullptr;
+  }
+}
+
 VideoFrameBuffer::~VideoFrameBuffer() {}
 
 I420Buffer::I420Buffer(int width, int height)
diff --git a/webrtc/media/engine/webrtcvideoframe.cc b/webrtc/media/engine/webrtcvideoframe.cc
index 4f89c8b85d..f77ca70fa3 100644
--- a/webrtc/media/engine/webrtcvideoframe.cc
+++ b/webrtc/media/engine/webrtcvideoframe.cc
@@ -65,6 +65,20 @@ bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh,
                 frame->rotation, apply_rotation);
 }
 
+// TODO(nisse): Deprecated, delete as soon as Chrome is updated.
+bool WebRtcVideoFrame::InitToBlack(int w, int h,
+                                   int64_t time_stamp_ns) {
+  rtc::scoped_refptr<webrtc::I420Buffer> buffer(
+      new rtc::RefCountedObject<webrtc::I420Buffer>(w, h));
+  buffer->SetToBlack();
+
+  video_frame_buffer_ = buffer;
+  SetTimeStamp(time_stamp_ns);
+  rotation_ = webrtc::kVideoRotation_0;
+
+  return true;
+}
+
 int WebRtcVideoFrame::width() const {
   return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
 }
diff --git a/webrtc/media/engine/webrtcvideoframe.h b/webrtc/media/engine/webrtcvideoframe.h
index 487e32ef9a..7f2a5c6059 100644
--- a/webrtc/media/engine/webrtcvideoframe.h
+++ b/webrtc/media/engine/webrtcvideoframe.h
@@ -63,6 +63,9 @@ class WebRtcVideoFrame : public VideoFrame {
   void InitToEmptyBuffer(int w, int h);
   void InitToEmptyBuffer(int w, int h, int64_t time_stamp_ns);
 
+  // TODO(nisse): Deprecated, delete as soon as Chrome is updated.
+  bool InitToBlack(int w, int h, int64_t time_stamp_ns);
+
   int width() const override;
   int height() const override;
 
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index 563df37fe8..f560a37d0e 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -123,16 +123,11 @@ int H264DecoderImpl::AVGetBuffer2(
   // The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
   // of a video frame and will be set up to reference |video_frame|'s buffers.
-
-  // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
-  // Refactor to do not use a VideoFrame object at all.
-
+  VideoFrame* video_frame = new VideoFrame();
   // FFmpeg expects the initial allocation to be zero-initialized according to
   // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
-  VideoFrame* video_frame = new VideoFrame(
-      decoder->pool_.CreateBuffer(width, height),
-      0 /* timestamp */, 0 /* render_time_ms */, kVideoRotation_0);
-
+  video_frame->set_video_frame_buffer(
+      decoder->pool_.CreateBuffer(width, height));
   // DCHECK that we have a continuous buffer as is required.
   RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
                 video_frame->video_frame_buffer()->DataY() +
@@ -360,30 +355,22 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
                 video_frame->video_frame_buffer()->DataV());
   video_frame->set_timestamp(input_image._timeStamp);
 
-  int32_t ret;
-
   // The decoded image may be larger than what is supposed to be visible, see
   // |AVGetBuffer2|'s use of |avcodec_align_dimensions|. This crops the image
   // without copying the underlying buffer.
   rtc::scoped_refptr<VideoFrameBuffer> buf = video_frame->video_frame_buffer();
   if (av_frame_->width != buf->width() || av_frame_->height != buf->height()) {
-    rtc::scoped_refptr<VideoFrameBuffer> cropped_buf(
+    video_frame->set_video_frame_buffer(
         new rtc::RefCountedObject<WrappedI420Buffer>(
             av_frame_->width, av_frame_->height,
             buf->DataY(), buf->StrideY(),
            buf->DataU(), buf->StrideU(),
            buf->DataV(), buf->StrideV(),
            rtc::KeepRefUntilDone(buf)));
-    VideoFrame cropped_frame(
-        cropped_buf, video_frame->timestamp(), video_frame->render_time_ms(),
-        video_frame->rotation());
-    // TODO(nisse): Timestamp and rotation are all zero here. Change decoder
-    // interface to pass a VideoFrameBuffer instead of a VideoFrame?
-    ret = decoded_image_callback_->Decoded(cropped_frame);
-  } else {
-    // Return decoded frame.
-    ret = decoded_image_callback_->Decoded(*video_frame);
   }
+
+  // Return decoded frame.
+  int32_t ret = decoded_image_callback_->Decoded(*video_frame);
   // Stop referencing it, possibly freeing |video_frame|.
   av_frame_unref(av_frame_.get());
   video_frame = nullptr;
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index 52ea1d63cb..750f7427cd 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -960,9 +960,9 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
       // release |img_buffer|.
       rtc::KeepRefUntilDone(img_buffer)));
 
-  VideoFrame decoded_image(img_wrapped_buffer, timestamp,
-                           0 /* render_time_ms */, webrtc::kVideoRotation_0);
-
+  VideoFrame decoded_image;
+  decoded_image.set_video_frame_buffer(img_wrapped_buffer);
+  decoded_image.set_timestamp(timestamp);
   int ret = decode_complete_callback_->Decoded(decoded_image);
   if (ret != 0)
     return ret;
diff --git a/webrtc/modules/video_coding/video_sender.cc b/webrtc/modules/video_coding/video_sender.cc
index e2c0d1f8a9..b9c5ea547a 100644
--- a/webrtc/modules/video_coding/video_sender.cc
+++ b/webrtc/modules/video_coding/video_sender.cc
@@ -288,17 +288,9 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
       !_encoder->SupportsNativeHandle()) {
     // This module only supports software encoding.
     // TODO(pbos): Offload conversion from the encoder thread.
-    rtc::scoped_refptr<VideoFrameBuffer> converted_buffer(
-        converted_frame.video_frame_buffer()->NativeToI420Buffer());
-
-    if (!converted_buffer) {
-      LOG(LS_ERROR) << "Frame conversion failed, dropping frame.";
-      return VCM_PARAMETER_ERROR;
-    }
-    converted_frame = VideoFrame(converted_buffer,
-                                 converted_frame.timestamp(),
-                                 converted_frame.render_time_ms(),
-                                 converted_frame.rotation());
+    converted_frame = converted_frame.ConvertNativeToI420Frame();
+    RTC_CHECK(!converted_frame.IsZeroSize())
+        << "Frame conversion failed, won't be able to encode frame.";
   }
   int32_t ret =
       _encoder->Encode(converted_frame, codecSpecificInfo, next_frame_types);
diff --git a/webrtc/modules/video_processing/frame_preprocessor.cc b/webrtc/modules/video_processing/frame_preprocessor.cc
index e86bbbb3bf..1d21340b31 100644
--- a/webrtc/modules/video_processing/frame_preprocessor.cc
+++ b/webrtc/modules/video_processing/frame_preprocessor.cc
@@ -96,22 +96,19 @@ const VideoFrame* VPMFramePreprocessor::PreprocessFrame(
   const VideoFrame* current_frame = &frame;
   if (denoiser_) {
-    rtc::scoped_refptr<I420Buffer>* denoised_buffer = &denoised_buffer_[0];
-    rtc::scoped_refptr<I420Buffer>* denoised_buffer_prev = &denoised_buffer_[1];
+    rtc::scoped_refptr<I420Buffer>* denoised_frame = &denoised_buffer_[0];
+    rtc::scoped_refptr<I420Buffer>* denoised_frame_prev = &denoised_buffer_[1];
     // Swap the buffer to save one memcpy in DenoiseFrame.
     if (denoised_frame_toggle_) {
-      denoised_buffer = &denoised_buffer_[1];
-      denoised_buffer_prev = &denoised_buffer_[0];
+      denoised_frame = &denoised_buffer_[1];
+      denoised_frame_prev = &denoised_buffer_[0];
     }
     // Invert the flag.
     denoised_frame_toggle_ ^= 1;
-    denoiser_->DenoiseFrame(current_frame->video_frame_buffer(),
-                            denoised_buffer,
-                            denoised_buffer_prev, true);
-    denoised_frame_ = VideoFrame(*denoised_buffer,
-                                 current_frame->timestamp(),
-                                 current_frame->render_time_ms(),
-                                 current_frame->rotation());
+    denoiser_->DenoiseFrame(current_frame->video_frame_buffer(), denoised_frame,
+                            denoised_frame_prev, true);
+    denoised_frame_.ShallowCopy(*current_frame);
+    denoised_frame_.set_video_frame_buffer(*denoised_frame);
     current_frame = &denoised_frame_;
   }
diff --git a/webrtc/modules/video_processing/spatial_resampler.cc b/webrtc/modules/video_processing/spatial_resampler.cc
index 7c4aae2088..74a570fb07 100644
--- a/webrtc/modules/video_processing/spatial_resampler.cc
+++ b/webrtc/modules/video_processing/spatial_resampler.cc
@@ -58,10 +58,10 @@ int32_t VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
   scaled_buffer->CropAndScaleFrom(inFrame.video_frame_buffer());
 
-  *outFrame = VideoFrame(scaled_buffer,
-                         inFrame.timestamp(),
-                         inFrame.render_time_ms(),
-                         inFrame.rotation());
+  outFrame->set_video_frame_buffer(scaled_buffer);
+  // Setting time parameters to the output frame.
+  outFrame->set_timestamp(inFrame.timestamp());
+  outFrame->set_render_time_ms(inFrame.render_time_ms());
 
   return VPM_OK;
 }
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm b/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm
index 5b2d2586b1..872f6be5f0 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm
@@ -40,42 +40,42 @@
   if (!self.i420Buffer) {
     return nullptr;
   }
-  return self.i420Buffer->DataY();
+  return self.i420Buffer->data(webrtc::kYPlane);
 }
 
 - (const uint8_t *)uPlane {
   if (!self.i420Buffer) {
     return nullptr;
   }
-  return self.i420Buffer->DataU();
+  return self.i420Buffer->data(webrtc::kUPlane);
 }
 
 - (const uint8_t *)vPlane {
   if (!self.i420Buffer) {
     return nullptr;
   }
-  return self.i420Buffer->DataV();
+  return self.i420Buffer->data(webrtc::kVPlane);
 }
 
 - (int32_t)yPitch {
   if (!self.i420Buffer) {
     return 0;
   }
-  return self.i420Buffer->StrideY();
+  return self.i420Buffer->stride(webrtc::kYPlane);
 }
 
 - (int32_t)uPitch {
   if (!self.i420Buffer) {
     return 0;
   }
-  return self.i420Buffer->StrideU();
+  return self.i420Buffer->stride(webrtc::kUPlane);
 }
 
 - (int32_t)vPitch {
   if (!self.i420Buffer) {
     return 0;
   }
-  return self.i420Buffer->StrideV();
+  return self.i420Buffer->stride(webrtc::kVPlane);
 }
 
 - (int64_t)timeStamp {
diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h
index 0f2b9a68af..b9ba69bb05 100644
--- a/webrtc/video_frame.h
+++ b/webrtc/video_frame.h
@@ -121,6 +121,14 @@ class VideoFrame {
   const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer()
       const;
 
+  // Set the underlying buffer.
+  void set_video_frame_buffer(
+      const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer);
+
+  // Convert native-handle frame to memory-backed I420 frame. Should not be
+  // called on a non-native-handle frame.
+  VideoFrame ConvertNativeToI420Frame() const;
+
   // Return true if the frame is stored in a texture.
   bool is_texture() {
     return video_frame_buffer() &&