Remove I420VideoFrame::SwapFrame

The few remaining uses of this function are replaced with I420VideoFrame assignment, similar to scoped_refptr assignment.
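For context, a minimal self-contained sketch (toy types, not the WebRTC API) of why plain assignment is an adequate replacement: I420VideoFrame keeps its pixel data in a ref-counted buffer, so copy assignment shares that buffer and copies only metadata, much like assigning a scoped_refptr. Names such as ToyFrame and PixelBuffer below are illustrative only.

// Toy illustration (not the WebRTC API): a frame type whose pixels live in a
// ref-counted buffer, so copy assignment shares the buffer instead of copying
// pixel data -- the property this change relies on when replacing SwapFrame()
// calls with plain assignment.
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <memory>
#include <vector>

struct PixelBuffer {
  std::vector<uint8_t> data;
};

class ToyFrame {
 public:
  void Allocate(std::size_t size) {
    buffer_ = std::make_shared<PixelBuffer>();
    buffer_->data.resize(size);
  }
  // Default copy assignment copies the shared_ptr (a refcount bump) and the
  // metadata -- no pixel copy, just like scoped_refptr assignment.
  const PixelBuffer* buffer() const { return buffer_.get(); }
  void set_render_time_ms(int64_t t) { render_time_ms_ = t; }
  int64_t render_time_ms() const { return render_time_ms_; }

 private:
  std::shared_ptr<PixelBuffer> buffer_;
  int64_t render_time_ms_ = 0;
};

int main() {
  ToyFrame incoming, buffer_to_render;
  incoming.Allocate(640 * 480 * 3 / 2);  // One I420 frame: 1.5 bytes/pixel.
  incoming.set_render_time_ms(33);

  // Old pattern: buffer_to_render.SwapFrame(&incoming);
  // New pattern: assignment; both frames now reference the same pixel buffer.
  buffer_to_render = incoming;

  return (buffer_to_render.buffer() == incoming.buffer() &&
          buffer_to_render.render_time_ms() == 33)
             ? EXIT_SUCCESS
             : EXIT_FAILURE;
}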

BUG=1128
R=mflodman@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/42889004

Cr-Commit-Position: refs/heads/master@{#8844}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8844 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: magjed@webrtc.org
Committed: 2015-03-24 12:43:05 +00:00
Parent: 2d2a30c2e2
Commit: deafa7b3c9
6 changed files with 2 additions and 83 deletions

@@ -159,14 +159,6 @@ void I420VideoFrame::ShallowCopy(const I420VideoFrame& videoFrame) {
   rotation_ = videoFrame.rotation_;
 }
 
-void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
-  video_frame_buffer_.swap(videoFrame->video_frame_buffer_);
-  std::swap(timestamp_, videoFrame->timestamp_);
-  std::swap(ntp_time_ms_, videoFrame->ntp_time_ms_);
-  std::swap(render_time_ms_, videoFrame->render_time_ms_);
-  std::swap(rotation_, videoFrame->rotation_);
-}
-
 void I420VideoFrame::Reset() {
   video_frame_buffer_ = nullptr;
   timestamp_ = 0;

@@ -228,71 +228,6 @@ TEST(TestI420VideoFrame, CopyBuffer) {
   EXPECT_LE(kSizeUv, frame2.allocated_size(kVPlane));
 }
 
-TEST(TestI420VideoFrame, FrameSwap) {
-  I420VideoFrame frame1, frame2;
-  uint32_t timestamp1 = 1;
-  int64_t ntp_time_ms1 = 2;
-  int64_t render_time_ms1 = 3;
-  int stride_y1 = 15;
-  int stride_u1 = 10;
-  int stride_v1 = 10;
-  int width1 = 15;
-  int height1 = 15;
-  const int kSizeY1 = 225;
-  const int kSizeU1 = 80;
-  const int kSizeV1 = 80;
-  uint32_t timestamp2 = 4;
-  int64_t ntp_time_ms2 = 5;
-  int64_t render_time_ms2 = 6;
-  int stride_y2 = 30;
-  int stride_u2 = 20;
-  int stride_v2 = 20;
-  int width2 = 30;
-  int height2 = 30;
-  const int kSizeY2 = 900;
-  const int kSizeU2 = 300;
-  const int kSizeV2 = 300;
-  // Initialize frame1 values.
-  EXPECT_EQ(0, frame1.CreateEmptyFrame(width1, height1,
-                                       stride_y1, stride_u1, stride_v1));
-  frame1.set_timestamp(timestamp1);
-  frame1.set_ntp_time_ms(ntp_time_ms1);
-  frame1.set_render_time_ms(render_time_ms1);
-  // Set memory for frame1.
-  uint8_t buffer_y1[kSizeY1];
-  uint8_t buffer_u1[kSizeU1];
-  uint8_t buffer_v1[kSizeV1];
-  memset(buffer_y1, 2, kSizeY1);
-  memset(buffer_u1, 4, kSizeU1);
-  memset(buffer_v1, 8, kSizeV1);
-  frame1.CreateFrame(buffer_y1, buffer_u1, buffer_v1,
-                     width1, height1, stride_y1, stride_u1, stride_v1);
-  // Initialize frame2 values.
-  EXPECT_EQ(0, frame2.CreateEmptyFrame(width2, height2,
-                                       stride_y2, stride_u2, stride_v2));
-  frame2.set_timestamp(timestamp2);
-  frame1.set_ntp_time_ms(ntp_time_ms2);
-  frame2.set_render_time_ms(render_time_ms2);
-  // Set memory for frame2.
-  uint8_t buffer_y2[kSizeY2];
-  uint8_t buffer_u2[kSizeU2];
-  uint8_t buffer_v2[kSizeV2];
-  memset(buffer_y2, 0, kSizeY2);
-  memset(buffer_u2, 1, kSizeU2);
-  memset(buffer_v2, 2, kSizeV2);
-  frame2.CreateFrame(buffer_y2, buffer_u2, buffer_v2,
-                     width2, height2, stride_y2, stride_u2, stride_v2);
-  // Copy frames for subsequent comparison.
-  I420VideoFrame frame1_copy, frame2_copy;
-  frame1_copy.CopyFrame(frame1);
-  frame2_copy.CopyFrame(frame2);
-  // Swap frames.
-  frame1.SwapFrame(&frame2);
-  // Verify swap.
-  EXPECT_TRUE(EqualFrames(frame1_copy, frame2));
-  EXPECT_TRUE(EqualFrames(frame2_copy, frame1));
-}
-
 TEST(TestI420VideoFrame, ReuseAllocation) {
   I420VideoFrame frame;
   frame.CreateEmptyFrame(640, 320, 640, 320, 320);

@@ -125,7 +125,6 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
       : decoded_frames_(0) {
   }
   virtual int32_t Decoded(I420VideoFrame& decoded_image) {
-    last_decoded_frame_.CopyFrame(decoded_image);
     for (int i = 0; i < decoded_image.width(); ++i) {
       EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1);
     }
@@ -141,13 +140,9 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
   int DecodedFrames() {
     return decoded_frames_;
   }
-  void GetLastDecodedFrame(I420VideoFrame* decoded_frame) {
-    decoded_frame->SwapFrame(&last_decoded_frame_);
-  }
 
  private:
   int decoded_frames_;
-  I420VideoFrame last_decoded_frame_;
 };
 
 class SkipEncodingUnusedStreamsTest {

@@ -386,7 +386,7 @@ int32_t AndroidNativeOpenGl2Channel::RenderFrame(
     I420VideoFrame& videoFrame) {
   // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
   _renderCritSect.Enter();
-  _bufferToRender.SwapFrame(&videoFrame);
+  _bufferToRender = videoFrame;
   _renderCritSect.Leave();
   _renderer.ReDraw();
   return 0;

@@ -415,7 +415,7 @@ int32_t AndroidSurfaceViewChannel::RenderFrame(
    I420VideoFrame& videoFrame) {
   // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
   _renderCritSect.Enter();
-  _bufferToRender.SwapFrame(&videoFrame);
+  _bufferToRender = videoFrame;
   _renderCritSect.Leave();
   _renderer.ReDraw();
   return 0;

@@ -87,9 +87,6 @@ class I420VideoFrame {
   // reference to the video buffer also retained by |videoFrame|.
   void ShallowCopy(const I420VideoFrame& videoFrame);
 
-  // Swap Frame.
-  void SwapFrame(I420VideoFrame* videoFrame);
-
   // Release frame buffer and reset time stamps.
   void Reset();