Delete method webrtc::VideoFrame::native_handle.

Instead, use the corresponding method on VideoFrameBuffer. In the process,
reduce code duplication in frame comparison functions used in
the test code.
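
For callers, the migration is mechanical; a minimal sketch (the helper name is
hypothetical, the accessors are the ones used in the diffs below, and the
WebRTC headers are assumed to be on the include path):

    // Hypothetical caller-side helper, for illustration only.
    bool IsTextureBacked(const webrtc::VideoFrame& frame) {
      // VideoFrame::native_handle() is gone; ask the underlying buffer instead.
      // video_frame_buffer() is never nullptr for a properly initialized frame.
      return frame.video_frame_buffer()->native_handle() != nullptr;
    }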

Make FramesEqual use FrameBufsEqual. Make the latter support texture frames.
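
Underneath, the shared plane comparison now takes a stride per buffer, so
FrameBufsEqual no longer requires the two buffers to use identical strides.
A self-contained sketch of that low-level helper, mirroring the frame_utils
diff below:

    #include <cstdint>
    #include <cstring>

    // Compare two planes row by row; each buffer may use its own stride.
    bool EqualPlane(const uint8_t* data1, const uint8_t* data2,
                    int stride1, int stride2, int width, int height) {
      for (int y = 0; y < height; ++y) {
        if (std::memcmp(data1, data2, width) != 0)
          return false;
        data1 += stride1;
        data2 += stride2;
      }
      return true;
    }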

The CL also refactors VideoFrame::CopyFrame to use I420Buffer::Copy. This
has the possibly undesired side effects of never reusing the frame buffer of
the destination frame, and of producing a frame buffer which may use a
different stride than the source frame.
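
Both side effects follow from the shape of the new implementation (condensed
from the video_frame.cc diff below): I420Buffer::Copy always allocates a fresh
buffer with strides of its own choosing.

    void VideoFrame::CopyFrame(const VideoFrame& videoFrame) {
      ShallowCopy(videoFrame);
      // If backed by a plain memory buffer, create a new, non-shared, copy.
      if (video_frame_buffer_ && !video_frame_buffer_->native_handle()) {
        video_frame_buffer_ = I420Buffer::Copy(video_frame_buffer_);
      }
    }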

BUG=webrtc:5682

Review URL: https://codereview.webrtc.org/1881953002

Cr-Commit-Position: refs/heads/master@{#12373}
nisse authored on 2016-04-15 03:43:39 -07:00; committed by Commit bot
parent 3911c26bc0, commit 26acec4772
11 changed files with 77 additions and 140 deletions

View File

@@ -666,7 +666,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
quality_scaler_.GetScaledResolution();
if (scaled_resolution.width != frame.width() ||
scaled_resolution.height != frame.height()) {
- if (frame.native_handle() != nullptr) {
+ if (frame.video_frame_buffer()->native_handle() != nullptr) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
static_cast<AndroidTextureBuffer*>(
frame.video_frame_buffer().get())->ScaleAndRotate(
@@ -691,7 +691,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
const bool key_frame =
frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
bool encode_status = true;
- if (!input_frame.native_handle()) {
+ if (!input_frame.video_frame_buffer()->native_handle()) {
int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
j_dequeue_input_buffer_method_);
CHECK_EXCEPTION(jni);
@@ -741,7 +741,8 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
const webrtc::VideoFrame& frame) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
- const bool is_texture_frame = frame.native_handle() != nullptr;
+ const bool is_texture_frame =
+ frame.video_frame_buffer()->native_handle() != nullptr;
const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
const bool reconfigure_due_to_size =
frame.width() != width_ || frame.height() != height_;
@@ -802,8 +803,8 @@ bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
bool key_frame, const webrtc::VideoFrame& frame) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
RTC_CHECK(use_surface_);
- NativeHandleImpl* handle =
- static_cast<NativeHandleImpl*>(frame.native_handle());
+ NativeHandleImpl* handle = static_cast<NativeHandleImpl*>(
+ frame.video_frame_buffer()->native_handle());
jfloatArray sampling_matrix = jni->NewFloatArray(16);
jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
@@ -1256,4 +1257,3 @@ void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
}
} // namespace webrtc_jni

View File

@@ -246,7 +246,8 @@ TEST(TestVideoFrame, TextureInitialValues) {
EXPECT_EQ(480, frame.height());
EXPECT_EQ(100u, frame.timestamp());
EXPECT_EQ(10, frame.render_time_ms());
- EXPECT_EQ(handle, frame.native_handle());
+ ASSERT_TRUE(frame.video_frame_buffer() != nullptr);
+ EXPECT_EQ(handle, frame.video_frame_buffer()->native_handle());
frame.set_timestamp(200);
EXPECT_EQ(200u, frame.timestamp());

View File

@@ -113,22 +113,12 @@ void VideoFrame::CreateFrame(const uint8_t* buffer,
}
void VideoFrame::CopyFrame(const VideoFrame& videoFrame) {
- if (videoFrame.IsZeroSize()) {
- video_frame_buffer_ = nullptr;
- } else if (videoFrame.native_handle()) {
- video_frame_buffer_ = videoFrame.video_frame_buffer();
- } else {
- CreateFrame(videoFrame.buffer(kYPlane), videoFrame.buffer(kUPlane),
- videoFrame.buffer(kVPlane), videoFrame.width(),
- videoFrame.height(), videoFrame.stride(kYPlane),
- videoFrame.stride(kUPlane), videoFrame.stride(kVPlane),
- kVideoRotation_0);
- }
+ ShallowCopy(videoFrame);
- timestamp_ = videoFrame.timestamp_;
- ntp_time_ms_ = videoFrame.ntp_time_ms_;
- render_time_ms_ = videoFrame.render_time_ms_;
- rotation_ = videoFrame.rotation_;
+ // If backed by a plain memory buffer, create a new, non-shared, copy.
+ if (video_frame_buffer_ && !video_frame_buffer_->native_handle()) {
+ video_frame_buffer_ = I420Buffer::Copy(video_frame_buffer_);
+ }
}
void VideoFrame::ShallowCopy(const VideoFrame& videoFrame) {
@@ -177,10 +167,6 @@ bool VideoFrame::IsZeroSize() const {
return !video_frame_buffer_;
}
- void* VideoFrame::native_handle() const {
- return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr;
- }
rtc::scoped_refptr<VideoFrameBuffer> VideoFrame::video_frame_buffer() const {
return video_frame_buffer_;
}
@@ -191,7 +177,7 @@ void VideoFrame::set_video_frame_buffer(
}
VideoFrame VideoFrame::ConvertNativeToI420Frame() const {
- RTC_DCHECK(native_handle());
+ RTC_DCHECK(video_frame_buffer_->native_handle());
VideoFrame frame;
frame.ShallowCopy(*this);
frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer());

View File

@@ -315,7 +315,8 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
return VCM_PARAMETER_ERROR;
}
VideoFrame converted_frame = videoFrame;
- if (converted_frame.native_handle() && !_encoder->SupportsNativeHandle()) {
+ if (converted_frame.video_frame_buffer()->native_handle() &&
+ !_encoder->SupportsNativeHandle()) {
// This module only supports software encoding.
// TODO(pbos): Offload conversion from the encoder thread.
converted_frame = converted_frame.ConvertNativeToI420Frame();

View File

@@ -16,53 +16,60 @@ namespace test {
bool EqualPlane(const uint8_t* data1,
const uint8_t* data2,
- int stride,
+ int stride1,
+ int stride2,
int width,
int height) {
for (int y = 0; y < height; ++y) {
if (memcmp(data1, data2, width) != 0)
return false;
- data1 += stride;
- data2 += stride;
+ data1 += stride1;
+ data2 += stride2;
}
return true;
}
bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2) {
- if (f1.width() != f2.width() || f1.height() != f2.height() ||
- f1.stride(webrtc::kYPlane) != f2.stride(webrtc::kYPlane) ||
- f1.stride(webrtc::kUPlane) != f2.stride(webrtc::kUPlane) ||
- f1.stride(webrtc::kVPlane) != f2.stride(webrtc::kVPlane) ||
- f1.timestamp() != f2.timestamp() ||
+ if (f1.timestamp() != f2.timestamp() ||
f1.ntp_time_ms() != f2.ntp_time_ms() ||
f1.render_time_ms() != f2.render_time_ms()) {
return false;
}
- const int half_width = (f1.width() + 1) / 2;
- const int half_height = (f1.height() + 1) / 2;
- return EqualPlane(f1.buffer(webrtc::kYPlane), f2.buffer(webrtc::kYPlane),
- f1.stride(webrtc::kYPlane), f1.width(), f1.height()) &&
- EqualPlane(f1.buffer(webrtc::kUPlane), f2.buffer(webrtc::kUPlane),
- f1.stride(webrtc::kUPlane), half_width, half_height) &&
- EqualPlane(f1.buffer(webrtc::kVPlane), f2.buffer(webrtc::kVPlane),
- f1.stride(webrtc::kVPlane), half_width, half_height);
+ return FrameBufsEqual(f1.video_frame_buffer(), f2.video_frame_buffer());
}
bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f2) {
- if (f1->width() != f2->width() || f1->height() != f2->height() ||
- f1->stride(webrtc::kYPlane) != f2->stride(webrtc::kYPlane) ||
- f1->stride(webrtc::kUPlane) != f2->stride(webrtc::kUPlane) ||
- f1->stride(webrtc::kVPlane) != f2->stride(webrtc::kVPlane)) {
+ if (f1 == f2) {
+ return true;
+ }
+ // Exlude nullptr (except if both are nullptr, as above)
+ if (!f1 || !f2) {
+ return false;
+ }
+ if (f1->width() != f2->width() || f1->height() != f2->height()) {
return false;
}
+ // Exclude native handle
+ if (f1->native_handle()) {
+ return f1->native_handle() == f2->native_handle();
+ }
+ if (f2->native_handle()) {
+ return false;
+ }
const int half_width = (f1->width() + 1) / 2;
const int half_height = (f1->height() + 1) / 2;
return EqualPlane(f1->data(webrtc::kYPlane), f2->data(webrtc::kYPlane),
- f1->stride(webrtc::kYPlane), f1->width(), f1->height()) &&
+ f1->stride(webrtc::kYPlane), f2->stride(webrtc::kYPlane),
+ f1->width(), f1->height()) &&
EqualPlane(f1->data(webrtc::kUPlane), f2->data(webrtc::kUPlane),
- f1->stride(webrtc::kUPlane), half_width, half_height) &&
+ f1->stride(webrtc::kUPlane), f2->stride(webrtc::kUPlane),
+ half_width, half_height) &&
EqualPlane(f1->data(webrtc::kVPlane), f2->data(webrtc::kVPlane),
- f1->stride(webrtc::kVPlane), half_width, half_height);
+ f1->stride(webrtc::kVPlane), f2->stride(webrtc::kVPlane),
+ half_width, half_height);
}
} // namespace test

View File

@@ -20,10 +20,19 @@ namespace test {
bool EqualPlane(const uint8_t* data1,
const uint8_t* data2,
- int stride,
+ int stride1,
+ int stride2,
int width,
int height);
+ static inline bool EqualPlane(const uint8_t* data1,
+ const uint8_t* data2,
+ int stride,
+ int width,
+ int height) {
+ return EqualPlane(data1, data2, stride, stride, width, height);
+ }
bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2);
bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,

View File

@@ -16,6 +16,7 @@
#include "webrtc/base/event.h"
#include "webrtc/base/refcount.h"
#include "webrtc/test/fake_texture_frame.h"
#include "webrtc/test/frame_utils.h"
#include "webrtc/video/send_statistics_proxy.h"
// If an output frame does not arrive in 500ms, the test will fail.
@@ -23,9 +24,6 @@
namespace webrtc {
- bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
- bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2);
- bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2);
bool EqualFramesVector(const std::vector<std::unique_ptr<VideoFrame>>& frames1,
const std::vector<std::unique_ptr<VideoFrame>>& frames2);
std::unique_ptr<VideoFrame> CreateVideoFrame(uint8_t length);
@@ -54,7 +52,8 @@ class VideoCaptureInputTest : public ::testing::Test {
EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS));
VideoFrame frame;
EXPECT_TRUE(input_->GetVideoFrame(&frame));
- if (!frame.native_handle()) {
+ ASSERT_TRUE(frame.video_frame_buffer());
+ if (!frame.video_frame_buffer()->native_handle()) {
output_frame_ybuffers_.push_back(
static_cast<const VideoFrame*>(&frame)->buffer(kYPlane));
}
@@ -168,7 +167,9 @@ TEST_F(VideoCaptureInputTest, TestTextureFrames) {
i + 1, webrtc::kVideoRotation_0))));
AddInputFrame(input_frames_[i].get());
WaitOutputFrame();
- EXPECT_EQ(dummy_handle, output_frames_[i]->native_handle());
+ ASSERT_TRUE(output_frames_[i]->video_frame_buffer());
+ EXPECT_EQ(dummy_handle,
+ output_frames_[i]->video_frame_buffer()->native_handle());
}
EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
@@ -198,7 +199,9 @@ TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) {
dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0))));
AddInputFrame(input_frames_[0].get());
WaitOutputFrame();
- EXPECT_EQ(dummy_handle, output_frames_[0]->native_handle());
+ ASSERT_TRUE(output_frames_[0]->video_frame_buffer());
+ EXPECT_EQ(dummy_handle,
+ output_frames_[0]->video_frame_buffer()->native_handle());
input_frames_.push_back(CreateVideoFrame(2));
AddInputFrame(input_frames_[1].get());
@@ -222,44 +225,18 @@ TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) {
EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
}
- bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
- if (frame1.native_handle() || frame2.native_handle())
- return EqualTextureFrames(frame1, frame2);
- return EqualBufferFrames(frame1, frame2);
- }
- bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
- return ((frame1.native_handle() == frame2.native_handle()) &&
- (frame1.width() == frame2.width()) &&
- (frame1.height() == frame2.height()));
- }
- bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
- return ((frame1.width() == frame2.width()) &&
- (frame1.height() == frame2.height()) &&
- (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
- (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
- (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
- (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
- (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
- (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) &&
- (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane),
- frame1.allocated_size(kYPlane)) == 0) &&
- (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane),
- frame1.allocated_size(kUPlane)) == 0) &&
- (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane),
- frame1.allocated_size(kVPlane)) == 0));
- }
bool EqualFramesVector(
const std::vector<std::unique_ptr<VideoFrame>>& frames1,
const std::vector<std::unique_ptr<VideoFrame>>& frames2) {
if (frames1.size() != frames2.size())
return false;
for (size_t i = 0; i < frames1.size(); ++i) {
- if (!EqualFrames(*frames1[i], *frames2[i]))
+ // Compare frame buffers, since we don't care about differing timestamps.
+ if (!test::FrameBufsEqual(frames1[i]->video_frame_buffer(),
+ frames2[i]->video_frame_buffer())) {
return false;
+ }
}
return true;
}

View File

@@ -376,7 +376,7 @@ void VideoReceiveStream::FrameCallback(VideoFrame* video_frame) {
stats_proxy_.OnDecodedFrame();
// Post processing is not supported if the frame is backed by a texture.
- if (!video_frame->native_handle()) {
+ if (!video_frame->video_frame_buffer()->native_handle()) {
if (config_.pre_render_callback)
config_.pre_render_callback->FrameCallback(video_frame);
}

View File

@@ -32,6 +32,7 @@
#include "webrtc/test/call_test.h"
#include "webrtc/test/configurable_frame_size_encoder.h"
#include "webrtc/test/fake_texture_frame.h"
#include "webrtc/test/frame_utils.h"
#include "webrtc/test/null_transport.h"
#include "webrtc/test/testsupport/perf_test.h"
#include "webrtc/video/send_statistics_proxy.h"
@@ -42,11 +43,6 @@ namespace webrtc {
enum VideoFormat { kGeneric, kVP8, };
- void ExpectEqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
- void ExpectEqualTextureFrames(const VideoFrame& frame1,
- const VideoFrame& frame2);
- void ExpectEqualBufferFrames(const VideoFrame& frame1,
- const VideoFrame& frame2);
void ExpectEqualFramesVector(const std::vector<VideoFrame>& frames1,
const std::vector<VideoFrame>& frames2);
VideoFrame CreateVideoFrame(int width, int height, uint8_t data);
@@ -1244,49 +1240,13 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
DestroyStreams();
}
- void ExpectEqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
- if (frame1.native_handle() || frame2.native_handle())
- ExpectEqualTextureFrames(frame1, frame2);
- else
- ExpectEqualBufferFrames(frame1, frame2);
- }
- void ExpectEqualTextureFrames(const VideoFrame& frame1,
- const VideoFrame& frame2) {
- EXPECT_EQ(frame1.native_handle(), frame2.native_handle());
- EXPECT_EQ(frame1.width(), frame2.width());
- EXPECT_EQ(frame1.height(), frame2.height());
- }
- void ExpectEqualBufferFrames(const VideoFrame& frame1,
- const VideoFrame& frame2) {
- EXPECT_EQ(frame1.width(), frame2.width());
- EXPECT_EQ(frame1.height(), frame2.height());
- EXPECT_EQ(frame1.stride(kYPlane), frame2.stride(kYPlane));
- EXPECT_EQ(frame1.stride(kUPlane), frame2.stride(kUPlane));
- EXPECT_EQ(frame1.stride(kVPlane), frame2.stride(kVPlane));
- ASSERT_EQ(frame1.allocated_size(kYPlane), frame2.allocated_size(kYPlane));
- EXPECT_EQ(0,
- memcmp(frame1.buffer(kYPlane),
- frame2.buffer(kYPlane),
- frame1.allocated_size(kYPlane)));
- ASSERT_EQ(frame1.allocated_size(kUPlane), frame2.allocated_size(kUPlane));
- EXPECT_EQ(0,
- memcmp(frame1.buffer(kUPlane),
- frame2.buffer(kUPlane),
- frame1.allocated_size(kUPlane)));
- ASSERT_EQ(frame1.allocated_size(kVPlane), frame2.allocated_size(kVPlane));
- EXPECT_EQ(0,
- memcmp(frame1.buffer(kVPlane),
- frame2.buffer(kVPlane),
- frame1.allocated_size(kVPlane)));
- }
void ExpectEqualFramesVector(const std::vector<VideoFrame>& frames1,
const std::vector<VideoFrame>& frames2) {
EXPECT_EQ(frames1.size(), frames2.size());
for (size_t i = 0; i < std::min(frames1.size(), frames2.size()); ++i)
- ExpectEqualFrames(frames1[i], frames2[i]);
+ // Compare frame buffers, since we don't care about differing timestamps.
+ EXPECT_TRUE(test::FrameBufsEqual(frames1[i].video_frame_buffer(),
+ frames2[i].video_frame_buffer()));
}
VideoFrame CreateVideoFrame(int width, int height, uint8_t data) {

View File

@@ -334,7 +334,7 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) {
"Encode");
const VideoFrame* frame_to_send = &video_frame;
// TODO(wuchengli): support texture frames.
- if (!video_frame.native_handle()) {
+ if (!video_frame.video_frame_buffer()->native_handle()) {
// Pass frame via preprocessor.
frame_to_send = vp_->PreprocessFrame(video_frame);
if (!frame_to_send) {

View File

@@ -125,12 +125,8 @@ class VideoFrame {
// Return true if underlying plane buffers are of zero size, false if not.
bool IsZeroSize() const;
- // Return the handle of the underlying video frame. This is used when the
- // frame is backed by a texture. The object should be destroyed when it is no
- // longer in use, so the underlying resource can be freed.
- void* native_handle() const;
- // Return the underlying buffer.
+ // Return the underlying buffer. Never nullptr for a properly
+ // initialized VideoFrame.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer() const;
// Set the underlying buffer.