Adding libyuv to WebRTC:

- Adding the library to the DEPS file.
- Adding the wrapper implementation and tests.

This is an interim state, as these files are not being linked at this stage.
Review URL: http://webrtc-codereview.appspot.com/259005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1039 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: mikhal@webrtc.org
Date: 2011-11-28 18:09:41 +00:00
parent e07247af8d
commit 2cdb2d3833
10 changed files with 1945 additions and 0 deletions

DEPS

@@ -82,6 +82,9 @@ deps = {
"trunk/third_party/jsoncpp/":
"http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/jsoncpp@246",
"trunk/third_party/libyuv":
(Var("googlecode_url") % "libyuv") + "/trunk@54",
}
deps_os = {

common_video/libyuv/include/libyuv.h

@@ -0,0 +1,311 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* WebRTC's Wrapper to libyuv.
*/
#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_LIBYUV_H_
#define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_LIBYUV_H_
#include "typedefs.h"
namespace webrtc {
// TODO(mikhal): 1. Sync libyuv and WebRtc meaning of stride.
// 2. Reorder parameters for consistency.
// Supported video types.
enum VideoType {
kUnknown,
kI420,
kIYUV,
kRGB24,
kARGB,
kARGB4444,
kRGB565,
kARGB1555,
kYUY2,
kYV12,
kUYVY,
kMJPG,
kNV21,
kNV12,
kARGBMac,
kRGBAMac,
kNumberOfVideoTypes
};
// Supported rotation
// Direction of rotation - clockwise.
enum VideoRotationMode {
kRotateNone = 0,
kRotate90 = 90,
kRotate180 = 180,
kRotate270 = 270,
};
// Calculate the required buffer size.
// Input:
// - type - The type of the designated video frame.
// - width - frame width in pixels.
// - height - frame height in pixels.
// Return value: The required size in bytes to accommodate the specified
// video frame or -1 in case of an error.
int CalcBufferSize(VideoType type, int width, int height);
// Compute required buffer size when converting from one type to another.
// Input:
// - src_video_type - Type of the existing video frame.
// - dst_video_type - Type of the designated video frame.
// - length - length in bytes of the data.
// Return value: The required size in bytes to accommodate the specified
// converted video frame or -1 in case of an error.
int CalcBufferSize(VideoType src_video_type,
VideoType dst_video_type,
int length);
// TODO (mikhal): Merge the two functions above.
// TODO(mikhal): If WebRTC doesn't switch to three plane representation,
// use helper functions for the planes and widths.
// Convert To/From I420
// The following two functions convert an image between I420 and a
// specified format.
//
// Input:
// - src_video_type : Type of input video
// - src_frame : Pointer to a source frame.
// - width : Image width in pixels.
// - height : Image height in pixels.
// - dst_frame : Pointer to a destination frame.
// - interlaced : Flag indicating if the output should be interlaced I420.
// - rotate : Rotation mode of output image.
// Return value: 0 if OK, < 0 otherwise.
//
// Note: the following functions include the most common usage cases; for
// more specific usage, refer to the explicit functions below.
int ConvertToI420(VideoType src_video_type,
const uint8_t* src_frame,
int width,
int height,
uint8_t* dst_frame,
bool interlaced,
VideoRotationMode rotate);
int ConvertFromI420(VideoType dst_video_type,
const uint8_t* src_frame,
int width,
int height,
uint8_t* dst_frame,
bool interlaced,
VideoRotationMode rotate);
// The following list describes the designated conversion functions, which
// are called by the two general conversion functions above.
// Input and output descriptions mostly match the above descriptions, and are
// therefore omitted.
// Possible additional input value - dst_stride - stride of the dst frame.
int ConvertI420ToRGB24(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height);
int ConvertI420ToARGB(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride);
int ConvertI420ToARGB4444(const uint8_t* src_frame,
uint8_t* dst_frame,
int width,
int height,
int dst_stride);
int ConvertI420ToRGB565(const uint8_t* src_frame,
uint8_t* dst_frame,
int width,
int height);
int ConvertI420ToRGB565Android(const uint8_t* src_frame,
uint8_t* dst_frame,
int width,
int height);
int ConvertI420ToARGB1555(const uint8_t* src_frame,
uint8_t* dst_frame,
int width,
int height,
int dst_stride);
int ConvertI420ToARGBMac(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride);
int ConvertI420ToRGBAMac(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride);
int ConvertI420ToI420(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride = 0);
int ConvertI420ToUYVY(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride = 0);
int ConvertI420ToYUY2(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height,
int dst_stride = 0);
int ConvertI420ToYV12(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride);
int ConvertYUY2ToI420(int width, int height,
const uint8_t* src_frame,
uint8_t* dst_frame);
int ConvertYV12ToI420(const uint8_t* src_frame,
int width, int height,
uint8_t* dst_frame);
int ConvertRGB24ToARGB(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride);
int ConvertRGB24ToI420(int width, int height,
const uint8_t* src_frame,
uint8_t* dst_frame);
int ConvertARGBMacToI420(int width, int height,
const uint8_t* src_frame,
uint8_t* dst_frame);
int ConvertUYVYToI420(int width, int height,
const uint8_t* src_frame,
uint8_t* dst_frame);
// NV12 conversion and rotation
int ConvertNV12ToI420(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height);
int ConvertNV12ToI420AndRotate180(const uint8_t* src_frame,
uint8_t* dst_frame, int width,
int height);
int ConvertNV12ToI420AndRotateAntiClockwise(const uint8_t* src_frame,
uint8_t* dst_frame,
int width,
int height);
int ConvertNV12ToI420AndRotateClockwise(const uint8_t* src_frame,
uint8_t* dst_frame,
int width,
int height);
int ConvertNV12ToRGB565(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height);
// NV21 Conversion/Rotation
int ConvertNV21ToI420(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height);
int ConvertNV21ToI420AndRotate180(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height);
// TODO (mikhal): Rename to counterClockwise.
int ConvertNV21ToI420AndRotateAntiClockwise(const uint8_t* src_frame,
uint8_t* dst_frame,
int width,
int height);
int ConvertNV21ToI420AndRotateClockwise(const uint8_t* src_frame,
uint8_t* dst_frame,
int width,
int height);
// IPhone
int ConvertI420ToRGBAIPhone(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride);
// I420 Cut and Pad - make a center cut
int CutI420Frame(uint8_t* frame,
int src_width, int src_height,
int dst_width, int dst_height);
int I420Rotate(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
VideoRotationMode rotation_mode);
// Following three functions:
// Convert from I420/YV12 to I420 and rotate.
// Input:
// - src_frame : Pointer to a source frame.
// - src_width : Width of source frame in pixels.
// - src_height : Height of source frame in pixels.
// - dst_frame : Pointer to a destination frame.
// - dst_width : Width of destination frame in pixels.
// - dst_height : Height of destination frame in pixels.
// - src_video_type : Type of the input video.
// Return value: 0 if OK, < 0 otherwise.
int ConvertToI420AndRotateClockwise(const uint8_t* src_frame,
int src_width,
int src_height,
uint8_t* dst_frame,
int dst_width,
int dst_height,
VideoType src_video_type);
int ConvertToI420AndRotateAntiClockwise(const uint8_t* src_frame,
int src_width,
int src_height,
uint8_t* dst_frame,
int dst_width,
int dst_height,
VideoType src_video_type);
int ConvertToI420AndRotate180(const uint8_t* srcBuffer,
int srcWidth,
int srcHeight,
uint8_t* dstBuffer,
int dst_width,
int dst_height,
VideoType src_video_type);
// Mirror functions
// The following 2 functions perform mirroring on a given image
// (LeftRight/UpDown).
// Input:
// - width : Image width in pixels.
// - height : Image height in pixels.
// - src_frame : Pointer to a source frame.
// - dst_frame : Pointer to a destination frame.
// Return value: 0 if OK, < 0 otherwise.
int MirrorI420LeftRight(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height);
int MirrorI420UpDown(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height);
// Mirror functions + conversion
// Input:
// - src_frame : Pointer to source frame.
// - dst_frame : Pointer to destination frame.
// - src_width : Width of input buffer.
// - src_height : Height of input buffer.
// - src_video_type : Video type to convert from; kI420 if no
// conversion should be done.
// Return value: 0 if OK, < 0 otherwise.
int ConvertToI420AndMirrorUpDown(const uint8_t* src_frame,
uint8_t* dst_frame,
int src_width,
int src_height,
VideoType src_video_type);
int ConvertToI420AndRotate(const uint8_t* src_frame,
uint8_t* dst_frame,
int height,
int width,
VideoType src_video_type,
VideoRotationMode mode);
} // namespace webrtc
#endif // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_LIBYUV_H_
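Usage note (illustrative only, not part of this change): a caller is expected to size the destination with CalcBufferSize() before invoking the general conversion entry points. In the sketch below, ConvertCapturedFrame, i420_frame, and rgb_out are hypothetical names.

#include "common_video/libyuv/include/libyuv.h"

// Hypothetical sketch: convert an I420 frame to RGB24 through the wrapper.
// The caller takes ownership of *rgb_out and must delete [] it.
int ConvertCapturedFrame(const uint8_t* i420_frame, int width, int height,
                         uint8_t** rgb_out) {
  int rgb_size = webrtc::CalcBufferSize(webrtc::kRGB24, width, height);
  if (rgb_size < 0)
    return -1;
  *rgb_out = new uint8_t[rgb_size];
  // No interlacing, no rotation.
  return webrtc::ConvertFromI420(webrtc::kRGB24, i420_frame, width, height,
                                 *rgb_out, false, webrtc::kRotateNone);
}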

common_video/libyuv/include/scaler.h

@@ -0,0 +1,71 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Interface to the LibYuv scaling functionality
*/
#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
#define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
#include "common_video/libyuv/include/libyuv.h"
#include "typedefs.h"
namespace webrtc {
// Supported scaling types
enum ScaleMethod {
kScalePoint, // no interpolation
kScaleBilinear,
kScaleBox
};
// TODO(mikhal): Have Set() return the required size of dst_frame, such
// that the user can allocate memory for Scale().
class Scaler {
public:
Scaler();
~Scaler();
// Set interpolation properties:
//
// Return value: 0 - OK
// -1 - parameter error
int Set(int src_width, int src_height,
int dst_width, int dst_height,
VideoType src_video_type, VideoType dst_video_type,
ScaleMethod method);
// Scale frame.
// Memory is allocated by the user. If dst_frame is not of sufficient size,
// it will be reallocated to the appropriate size.
// Return value: 0 - OK,
// -1 - parameter error
// -2 - scaler not set
int Scale(const uint8_t* src_frame,
uint8_t*& dst_frame,
int& dst_size);
private:
// Determine if the VideoTypes are currently supported.
bool SupportedVideoType(VideoType src_video_type,
VideoType dst_video_type);
ScaleMethod method_;
int src_width_;
int src_height_;
int dst_width_;
int dst_height_;
bool set_;
};
} // namespace webrtc
#endif // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
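Usage note (illustrative only, not part of this change): Set() is called once per resolution change and Scale() once per frame; Scale() may reallocate dst_frame with new [], so the caller owns the buffer and must delete [] it. DownscaleFrame, cif_frame, qcif_frame, and qcif_size below are hypothetical names.

#include "common_video/libyuv/include/scaler.h"

// Hypothetical sketch: downscale one CIF (352x288) I420 frame to QCIF.
int DownscaleFrame(const uint8_t* cif_frame) {
  webrtc::Scaler scaler;
  if (scaler.Set(352, 288, 176, 144,
                 webrtc::kI420, webrtc::kI420, webrtc::kScaleBilinear) != 0)
    return -1;
  uint8_t* qcif_frame = NULL;  // Scale() allocates when NULL or too small.
  int qcif_size = 0;
  int ret = scaler.Scale(cif_frame, qcif_frame, qcif_size);
  delete [] qcif_frame;        // Caller owns the (re)allocated buffer.
  return ret;
}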

common_video/libyuv/libyuv.cc

@@ -0,0 +1,899 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_video/libyuv/include/libyuv.h"
#include <assert.h>
// LibYuv includes
#include "third_party/libyuv/include/libyuv.h"
namespace webrtc {
int CalcBufferSize(VideoType type, int width, int height) {
int bits_per_pixel = 32;
switch (type) {
case kI420:
case kNV12:
case kNV21:
case kIYUV:
case kYV12:
bits_per_pixel = 12;
break;
case kARGB4444:
case kRGB565:
case kARGB1555:
case kYUY2:
case kUYVY:
bits_per_pixel = 16;
break;
case kRGB24:
bits_per_pixel = 24;
break;
case kARGB:
bits_per_pixel = 32;
break;
default:
assert(false);
return -1;
}
return (width * height * bits_per_pixel) / 8; // bytes
}
int CalcBufferSize(VideoType src_video_type,
VideoType dst_video_type,
int length) {
int src_bits_per_pixel = 32;
switch (src_video_type) {
case kI420:
case kNV12:
case kNV21:
case kIYUV:
case kYV12:
src_bits_per_pixel = 12;
break;
case kARGB4444:
case kRGB565:
case kARGB1555:
case kYUY2:
case kUYVY:
src_bits_per_pixel = 16;
break;
case kRGB24:
src_bits_per_pixel = 24;
break;
case kARGB:
src_bits_per_pixel = 32;
break;
default:
assert(false);
return -1;
}
int dst_bits_per_pixel = 32;
switch (dst_video_type) {
case kI420:
case kIYUV:
case kYV12:
dst_bits_per_pixel = 12;
break;
case kARGB4444:
case kRGB565:
case kARGB1555:
case kYUY2:
case kUYVY:
dst_bits_per_pixel = 16;
break;
case kRGB24:
dst_bits_per_pixel = 24;
break;
case kARGB:
dst_bits_per_pixel = 32;
break;
default:
assert(false);
return -1;
}
return (length * dst_bits_per_pixel) / src_bits_per_pixel;
}
int ConvertI420ToRGB24(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height) {
const uint8_t* yplane = src_frame;
const uint8_t* uplane = src_frame + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
return libyuv::I420ToRGB24(yplane, width,
uplane, width / 2,
vplane, width / 2,
dst_frame, width * 3,
width, height);
}
int ConvertI420ToARGB(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height,
int dst_stride) {
if (dst_stride == 0 || dst_stride == width)
dst_stride = width * 4;
const uint8_t* yplane = src_frame;
const uint8_t* uplane = src_frame + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
return libyuv::I420ToARGB(yplane, width,
uplane, width / 2,
vplane, width / 2,
dst_frame, dst_stride,
width, height);
}
int ConvertI420ToRGBAMac(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride) {
// Equivalent to converting YV12 to RGBA.
// YV12 is the same as I420 with U and V swapped.
if (dst_stride == 0 || dst_stride == width)
dst_stride = 4 * width;
const uint8_t* yplane = src_frame;
const uint8_t* uplane = src_frame + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
return libyuv::I420ToARGB(yplane, width,
vplane, width / 2,
uplane, width / 2,
dst_frame, dst_stride,
width, height);
}
int ConvertI420ToARGB4444(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride) {
if (dst_stride == 0 || dst_stride == width)
dst_stride = 2 * width;
const uint8_t* yplane = src_frame;
const uint8_t* uplane = src_frame + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
return libyuv::I420ToARGB4444(yplane, width,
uplane, width / 2,
vplane, width / 2,
dst_frame, dst_stride,
width, height);
}
int ConvertI420ToRGB565(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height) {
const uint8_t* yplane = src_frame;
const uint8_t* uplane = src_frame + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
return libyuv::I420ToRGB565(yplane, width,
uplane, width / 2,
vplane, width / 2,
dst_frame, width,
width, height);
}
// Same as ConvertI420ToRGB565 with a vertical flip.
int ConvertI420ToRGB565Android(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height) {
const uint8_t* yplane = src_frame;
const uint8_t* uplane = src_frame + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
// Same as RGB565 + inversion - set negative height.
height = -height;
return libyuv::I420ToRGB565(yplane, width,
uplane, width / 2,
vplane, width / 2,
dst_frame, width,
width, height);
}
int ConvertI420ToARGB1555(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride) {
if (dst_stride == 0 || dst_stride == width)
dst_stride = 2 * width;
else if (dst_stride < 2 * width)
return -1;
const uint8_t* yplane = src_frame;
const uint8_t* uplane = src_frame + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
return libyuv::I420ToARGB1555(yplane, width,
uplane, width / 2,
vplane, width / 2,
dst_frame, dst_stride,
width, height);
}
int ConvertI420ToYUY2(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height,
int dst_stride) {
const uint8_t* yplane = src_frame;
const uint8_t* uplane = src_frame + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
if (dst_stride == 0 || dst_stride == width)
dst_stride = 2 * width;
return libyuv::I420ToYUY2(yplane, width,
uplane, width / 2,
vplane, width / 2,
dst_frame, dst_stride,
width, height);
}
int ConvertI420ToUYVY(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height,
int dst_stride) {
if (dst_stride == 0 || dst_stride == width)
dst_stride = 2 * width;
else if (dst_stride < width)
return -1;
const uint8_t* yplane = src_frame;
const uint8_t* uplane = src_frame + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
return libyuv::I420ToUYVY(yplane, width,
uplane, width / 2,
vplane, width / 2,
dst_frame, dst_stride,
width, height);
}
int ConvertI420ToYV12(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height,
int dst_stride) {
if (dst_stride == 0 || dst_stride == width)
dst_stride = width;
else if (dst_stride < width)
return -1;
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_frame + width * height;
const uint8_t* src_vplane = src_uplane + (width * height / 4);
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
// YV12 is YVU => Use I420(YUV) copy and flip U and V.
return libyuv::I420Copy(src_yplane, width,
src_vplane, width / 2,
src_uplane, width / 2,
dst_yplane, dst_stride,
dst_uplane, dst_stride / 2,
dst_vplane, dst_stride / 2,
width, height);
}
int ConvertYV12ToI420(const uint8_t* src_frame,
int width, int height,
uint8_t* dst_frame) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_frame + width * height;
const uint8_t* src_vplane = src_uplane + (width * height / 4);
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
// YV12 is YVU => Use I420(YUV) copy and flip U and V.
return libyuv::I420Copy(src_yplane, width,
src_vplane, width / 2,
src_uplane, width / 2,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, height);
}
int ConvertNV12ToI420(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uvplane = src_frame + width * height;
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::NV12ToI420(src_yplane, width,
src_uvplane, width,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, height);
}
int ConvertNV12ToI420AndRotate180(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uvplane = src_frame + width * height;
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::NV12ToI420Rotate(src_yplane, width,
src_uvplane, width,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, height,
libyuv::kRotate180);
}
int ConvertNV12ToI420AndRotateClockwise(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uvplane = src_frame + width * height;
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::NV12ToI420Rotate(src_yplane, width,
src_uvplane, width,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, height,
libyuv::kRotate90);
}
int ConvertNV12ToI420AndRotateAntiClockwise(const uint8_t* src_frame,
uint8_t* dst_frame,
int width,
int height) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uvplane = src_frame + width * height;
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::NV12ToI420Rotate(src_yplane, width,
src_uvplane, width,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, height,
libyuv::kRotate270);
}
int ConvertNV12ToRGB565(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height) {
const uint8_t* yplane = src_frame;
const uint8_t* uv_interleaved = src_frame + (width * height);
return libyuv::NV12ToRGB565(yplane, width,
uv_interleaved, width / 2,
dst_frame, width,
width, height);
}
int ConvertNV21ToI420(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height) {
// NV21 = y plane followed by an interleaved V/U plane, i.e. same as NV12
// but the U and the V are switched. Use the NV12 function and switch the U
// and V planes.
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uvplane = src_frame + width * height;
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::NV12ToI420(src_yplane, width,
src_uvplane, width,
dst_yplane, width,
dst_vplane, width / 2,
dst_uplane, width / 2,
width, height);
}
int ConvertNV21ToI420AndRotate180(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uvplane = src_frame + width * height;
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::NV12ToI420Rotate(src_yplane, width,
src_uvplane, width,
dst_yplane, width,
dst_vplane, width / 2,
dst_uplane, width / 2,
width, height,
libyuv::kRotate180);
}
int ConvertNV21ToI420AndRotateClockwise(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uvplane = src_frame + width * height;
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::NV12ToI420Rotate(src_yplane, width,
src_uvplane, width,
dst_yplane, width,
dst_vplane, width / 2,
dst_uplane, width / 2,
width, height,
libyuv::kRotate90);
}
int ConvertNV21ToI420AndRotateAntiClockwise(const uint8_t* src_frame,
uint8_t* dst_frame,
int width,
int height) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uvplane = src_frame + width * height;
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::NV12ToI420Rotate(src_yplane, width,
src_uvplane, width,
dst_yplane, width,
dst_vplane, width / 2,
dst_uplane, width / 2,
width, height,
libyuv::kRotate270);
}
int ConvertI420ToRGBAIPhone(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
int dst_stride) {
if (dst_stride == 0 || dst_stride == width)
dst_stride = 4 * width;
else if (dst_stride < 4 * width)
return -1;
const uint8_t* yplane = src_frame;
const uint8_t* uplane = src_frame + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
// RGBAIPhone = ABGR
return libyuv::I420ToABGR(yplane, width,
uplane, width / 2,
vplane, width / 2,
dst_frame, dst_stride,
width, height);
}
int ConvertI420ToI420(const uint8_t* src_frame, uint8_t* dst_frame,
int width,
int height, int dst_stride) {
if (dst_stride == 0)
dst_stride = width;
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_frame + width * height;
const uint8_t* src_vplane = src_uplane + (width * height / 4);
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::I420Copy(src_yplane, width,
src_uplane, width / 2,
src_vplane, width / 2,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, height);
}
int ConvertUYVYToI420(int width, int height,
const uint8_t* src_frame, uint8_t* dst_frame) {
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::UYVYToI420(src_frame, 2 * width,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, height);
}
int ConvertYUY2ToI420(int width, int height,
const uint8_t* src_frame, uint8_t* dst_frame) {
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::YUY2ToI420(src_frame, 2 * width,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, height);
}
// Make a center cut
int CutI420Frame(uint8_t* frame,
int fromWidth, int fromHeight,
int toWidth, int toHeight) {
// TODO(mikhal): Verify
return libyuv::I420Crop(frame,
fromWidth, fromHeight,
toWidth, toHeight);
}
int ConvertRGB24ToARGB(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height, int dst_stride) {
if (dst_stride == 0 || dst_stride == width)
dst_stride = width;
// Stride - currently webrtc style
return libyuv::RGB24ToARGB(src_frame, width,
dst_frame, dst_stride,
width, height);
}
int ConvertRGB24ToI420(int width, int height,
const uint8_t* src_frame, uint8_t* dst_frame) {
uint8_t* yplane = dst_frame;
uint8_t* uplane = yplane + width * height;
uint8_t* vplane = uplane + (width * height / 4);
// WebRTC expects a vertically flipped image.
return libyuv::RGB24ToI420(src_frame, width * 3,
yplane, width,
uplane, width / 2,
vplane, width / 2,
width, -height);
}
int ConvertI420ToARGBMac(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height, int dst_stride) {
// Equivalent to YV12ToARGB.
// YV12 = YVU => use I420 and interchange U and V.
const uint8_t* yplane = src_frame;
const uint8_t* uplane = yplane + width * height;
const uint8_t* vplane = uplane + (width * height / 4);
if (dst_stride == 0 || dst_stride == width)
dst_stride = 4 * width;
else if (dst_stride < 4 * width)
return -1;
return libyuv::I420ToARGB(yplane, width,
vplane, width / 2,
uplane, width / 2,
dst_frame, dst_stride,
width, height);
}
int ConvertARGBMacToI420(int width, int height,
const uint8_t* src_frame, uint8_t* dst_frame) {
// Equivalent to converting ARGB to YV12.
// YV12 = YVU => use I420 and switch U and V.
uint8_t* yplane = dst_frame;
uint8_t* uplane = yplane + width * height;
uint8_t* vplane = uplane + (width * height / 4);
return libyuv::ARGBToI420(src_frame, width * 4,
yplane, width,
vplane, width / 2,
uplane, width / 2,
width, height);
}
int ConvertToI420(VideoType src_video_type,
const uint8_t* src_frame,
int width,
int height,
uint8_t* dst_frame,
bool interlaced,
VideoRotationMode rotate /* = kRotateNone */) {
switch (src_video_type) {
case kRGB24:
return ConvertRGB24ToI420(width, height, src_frame,
dst_frame);
case kARGB:
return ConvertARGBMacToI420(width, height, src_frame,
dst_frame);
case kI420:
return I420Rotate(src_frame,
dst_frame,
width, height,
rotate);
case kYUY2:
return ConvertYUY2ToI420(width, height,
src_frame, dst_frame);
case kUYVY:
return ConvertUYVYToI420(width, height, src_frame,
dst_frame);
case kYV12:
switch (rotate) {
case kRotateNone:
return ConvertYV12ToI420(src_frame,
width, height,
dst_frame);
case kRotate90:
return ConvertToI420AndRotateClockwise(src_frame,
width,
height,
dst_frame,
height, width,
kYV12);
case kRotate270:
return ConvertToI420AndRotateAntiClockwise(src_frame,
width, height,
dst_frame,
height, width,
kYV12);
case kRotate180:
return ConvertToI420AndRotate180(src_frame,
width, height,
dst_frame,
height, width,
kYV12);
}
case kNV12:
switch (rotate) {
case kRotateNone:
return ConvertNV12ToI420(src_frame, dst_frame,
width, height);
case kRotate90:
return ConvertNV12ToI420AndRotateClockwise(src_frame,
dst_frame,
width, height);
case kRotate270:
return ConvertNV12ToI420AndRotateAntiClockwise(src_frame,
dst_frame,
width, height);
case kRotate180:
return ConvertNV12ToI420AndRotate180(src_frame,
dst_frame,
width, height);
}
case kNV21:
switch (rotate) {
case kRotateNone:
return ConvertNV21ToI420(src_frame,
dst_frame,
width, height);
case kRotate90:
return ConvertNV21ToI420AndRotateClockwise(src_frame,
dst_frame,
width, height);
case kRotate270:
return ConvertNV21ToI420AndRotateAntiClockwise(src_frame,
dst_frame,
width, height);
case kRotate180:
return ConvertNV21ToI420AndRotate180(src_frame,
dst_frame,
width, height);
}
break;
default:
return -1;
}
return -1;
}
int ConvertFromI420(VideoType dst_video_type,
const uint8_t* src_frame,
int width,
int height,
uint8_t* dst_frame,
bool interlaced,
VideoRotationMode rotate) {
switch (dst_video_type) {
case kRGB24:
return ConvertI420ToRGB24(src_frame, dst_frame, width, height);
case kARGB:
return ConvertI420ToARGB(src_frame, dst_frame, width, height, 0);
case kARGB4444:
return ConvertI420ToARGB4444(src_frame, dst_frame, width, height, 0);
case kARGB1555:
return ConvertI420ToARGB1555(src_frame, dst_frame, width, height, 0);
case kRGB565:
return ConvertI420ToRGB565(src_frame, dst_frame, width, height);
case kI420:
return ConvertI420ToI420(src_frame, dst_frame, width, height, width);
case kUYVY:
return ConvertI420ToUYVY(src_frame, dst_frame, width, height);
case kYUY2:
return ConvertI420ToYUY2(src_frame, dst_frame, width, height, 0);
case kYV12:
return ConvertI420ToYV12(src_frame, dst_frame, width, height, 0);
case kRGBAMac:
return ConvertI420ToRGBAMac(src_frame, dst_frame, width, height, 0);
case kARGBMac:
return ConvertI420ToARGBMac(src_frame, dst_frame, width, height, 0);
default:
return -1;
}
}
int MirrorI420LeftRight(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_yplane + width * height;
const uint8_t* src_vplane = src_uplane + (width * height / 4);
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_yplane + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::I420Mirror(src_yplane, width,
src_uplane, width / 2,
src_vplane, width / 2,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, height);
}
int MirrorI420UpDown(const uint8_t* src_frame, uint8_t* dst_frame,
int width, int height) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_frame + width * height;
const uint8_t* src_vplane = src_uplane + (width * height / 4);
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
// Inserting negative height flips the frame.
return libyuv::I420Copy(src_yplane, width,
src_uplane, width / 2,
src_vplane, width / 2,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, -height);
}
int ConvertToI420AndMirrorUpDown(const uint8_t* src_frame,
uint8_t* dst_frame,
int src_width, int src_height,
VideoType src_video_type) {
if (src_video_type != kI420 && src_video_type != kYV12)
return -1;
// TODO(mikhal): Use a more general convert function - with negative height.
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_frame + src_width * src_height;
const uint8_t* src_vplane = src_uplane + (src_width * src_height / 4);
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + src_width * src_height;
uint8_t* dst_vplane = dst_uplane + (src_width * src_height / 4);
if (src_video_type == kYV12) {
// Switch U and V
dst_vplane = dst_frame + src_width * src_height;
dst_uplane = dst_vplane + (src_width * src_height / 4);
}
// Inserting negative height flips the frame.
return libyuv::I420Copy(src_yplane, src_width,
src_uplane, src_width / 2,
src_vplane, src_width / 2,
dst_yplane, src_width,
dst_uplane, src_width / 2,
dst_vplane, src_width / 2,
src_width, -src_height);
}
int I420Rotate(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height,
VideoRotationMode rotation_mode) {
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_frame + width * height;
const uint8_t* src_vplane = src_uplane + (width * height / 4);
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + width * height;
uint8_t* dst_vplane = dst_uplane + (width * height / 4);
return libyuv::I420Rotate(src_yplane, width,
src_uplane, width / 2,
src_vplane, width / 2,
dst_yplane, width,
dst_uplane, width / 2,
dst_vplane, width / 2,
width, height,
static_cast<libyuv::RotationMode>(rotation_mode));
}
// TODO(mikhal): modify API to use only the general function.
int ConvertToI420AndRotateClockwise(const uint8_t* src_frame,
int src_width,
int src_height,
uint8_t* dst_frame,
int dst_width,
int dst_height,
VideoType src_video_type) {
if (src_video_type != kI420 && src_video_type != kYV12)
return -1;
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_frame + src_width * src_height;
const uint8_t* src_vplane = src_uplane + (src_width * src_height / 4);
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + src_width * src_height;
uint8_t* dst_vplane = dst_uplane + (src_width * src_height / 4);
if (src_video_type == kYV12) {
// Switch U and V
dst_vplane = dst_frame + src_width * src_height;
dst_uplane = dst_vplane + (src_width * src_height / 4);
}
return libyuv::I420Rotate(src_yplane, src_width,
src_uplane, src_width / 2,
src_vplane, src_width / 2,
dst_yplane, src_width,
dst_uplane, src_width / 2,
dst_vplane, src_width / 2,
src_width, src_height,
libyuv::kRotate90);
}
// TODO(mikhal): modify API to use only the general function.
int ConvertToI420AndRotateAntiClockwise(const uint8_t* src_frame,
int src_width,
int src_height,
uint8_t* dst_frame,
int dst_width,
int dst_height,
VideoType src_video_type) {
if (src_video_type != kI420 && src_video_type != kYV12)
return -1;
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_frame + src_width * src_height;
const uint8_t* src_vplane = src_uplane + (src_width * src_height / 4);
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + src_width * src_height;
uint8_t* dst_vplane = dst_uplane + (src_width * src_height / 4);
if (src_video_type == kYV12) {
// Switch U and V
dst_vplane = dst_frame + src_width * src_height;
dst_uplane = dst_vplane + (src_width * src_height / 4);
}
return libyuv::I420Rotate(src_yplane, src_width,
src_uplane, src_width / 2,
src_vplane, src_width / 2,
dst_yplane, src_width,
dst_uplane, src_width / 2,
dst_vplane, src_width / 2,
src_width, src_height,
libyuv::kRotate270);
}
// TODO(mikhal): modify API to use only the general function.
int ConvertToI420AndRotate180(const uint8_t* src_frame,
int src_width,
int src_height,
uint8_t* dst_frame,
int dst_width,
int dst_height,
VideoType src_video_type) {
if (src_video_type != kI420 && src_video_type != kYV12)
return -1;
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_frame + src_width * src_height;
const uint8_t* src_vplane = src_uplane + (src_width * src_height / 4);
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + src_width * src_height;
uint8_t* dst_vplane = dst_uplane + (src_width * src_height / 4);
if (src_video_type == kYV12) {
// Switch U and V
dst_vplane = dst_frame + src_width * src_height;
dst_uplane = dst_vplane + (src_width * src_height / 4);
}
return libyuv::I420Rotate(src_yplane, src_width,
src_uplane, src_width / 2,
src_vplane, src_width / 2,
dst_yplane, src_width,
dst_uplane, src_width / 2,
dst_vplane, src_width / 2,
src_width, src_height,
libyuv::kRotate180);
}
} // namespace webrtc
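Note (illustrative only, not part of this change): every function above derives plane pointers the same way; for a width x height I420/YV12 frame, the Y plane is width * height bytes and each chroma plane a quarter of that, with the YV12 and NV21 paths handling the reversed chroma order by swapping the U and V pointers. A hypothetical helper (I420Planes and GetPlanes are made-up names) states the layout once:

#include <stdint.h>

// Illustrative only: the plane layout assumed throughout the wrapper.
struct I420Planes {
  const uint8_t* y;
  const uint8_t* u;
  const uint8_t* v;
};

// For YV12 the chroma order in memory is V then U, so the pointers swap.
I420Planes GetPlanes(const uint8_t* frame, int width, int height,
                     bool chroma_swapped) {
  I420Planes planes;
  planes.y = frame;
  const uint8_t* first_chroma = frame + width * height;
  const uint8_t* second_chroma = first_chroma + (width * height / 4);
  planes.u = chroma_swapped ? second_chroma : first_chroma;
  planes.v = chroma_swapped ? first_chroma : second_chroma;
  return planes;
}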

@@ -0,0 +1,46 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
'targets': [
{
'target_name': 'webrtc_libyuv',
'type': '<(library)',
'dependencies': [
'<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv'
],
'sources': [
'include/libyuv.h',
'include/scaler.h',
'libyuv.cc',
'scaler.cc',
],
},
], # targets
'conditions': [
['build_with_chromium==0', {
'targets': [
{
'target_name': 'libyuv_unittests',
'type': 'executable',
'dependencies': [
'webrtc_libyuv',
'<(webrtc_root)/../testing/gtest.gyp:gtest',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/../test/test.gyp:test_support_main',
],
'sources': [
'test/test_util.h',
'test/test_util.cc',
'test/unit_test.cc',
],
},
], # targets
}], # build_with_chromium
], # conditions
}

common_video/libyuv/scaler.cc

@@ -0,0 +1,101 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_video/libyuv/include/scaler.h"
// LibYuv
#include "third_party/libyuv/include/libyuv.h"
namespace webrtc {
Scaler::Scaler()
: method_(kScaleBox),
src_width_(0),
src_height_(0),
dst_width_(0),
dst_height_(0),
set_(false) {}
Scaler::~Scaler() {}
int Scaler::Set(int src_width, int src_height,
int dst_width, int dst_height,
VideoType src_video_type, VideoType dst_video_type,
ScaleMethod method) {
set_ = false;
if (src_width < 1 || src_height < 1 || dst_width < 1 || dst_height < 1)
return -1;
if (!SupportedVideoType(src_video_type, dst_video_type))
return -1;
src_width_ = src_width;
src_height_ = src_height;
dst_width_ = dst_width;
dst_height_ = dst_height;
method_ = method;
set_ = true;
return 0;
}
int Scaler::Scale(const uint8_t* src_frame,
uint8_t*& dst_frame,
int& dst_size) {
if (src_frame == NULL)
return -1;
if (!set_)
return -2;
// Make sure the destination frame is of sufficient size.
int required_dst_size = dst_width_ * dst_height_ * 3 / 2;
if (dst_frame && required_dst_size > dst_size) {
// allocated buffer is too small
delete [] dst_frame;
dst_frame = NULL;
}
if (dst_frame == NULL) {
dst_frame = new uint8_t[required_dst_size];
dst_size = required_dst_size;
}
// Converting to planes:
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_frame + src_width_ * src_height_;
const uint8_t* src_vplane = src_uplane + src_width_ * src_height_ / 4;
uint8_t* dst_yplane = dst_frame;
uint8_t* dst_uplane = dst_frame + dst_width_ * dst_height_;
uint8_t* dst_vplane = dst_uplane + dst_width_ * dst_height_ / 4;
return libyuv::I420Scale(src_yplane, src_width_,
src_uplane, src_width_ / 2,
src_vplane, src_width_ / 2,
src_width_, src_height_,
dst_yplane, dst_width_,
dst_uplane, dst_width_ / 2,
dst_vplane, dst_width_ / 2,
dst_width_, dst_height_,
libyuv::FilterMode(method_));
}
// TODO(mikhal): Add support for more types.
bool Scaler::SupportedVideoType(VideoType src_video_type,
VideoType dst_video_type) {
if (src_video_type != dst_video_type)
return false;
if ((src_video_type == kI420) || (src_video_type == kIYUV) ||
(src_video_type == kYV12))
return true;
return false;
}
} // namespace webrtc
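Note (illustrative only, not part of this change): Scale() maps ScaleMethod to libyuv with a raw cast, libyuv::FilterMode(method_), which silently assumes the two enums stay in the same order. A more defensive sketch, assuming the kFilterNone/kFilterBilinear/kFilterBox enumerators exist at the pinned libyuv revision (ToLibyuvFilter is a made-up name):

// Sketch of an explicit mapping instead of a raw enum cast.
libyuv::FilterMode ToLibyuvFilter(webrtc::ScaleMethod method) {
  switch (method) {
    case webrtc::kScalePoint:
      return libyuv::kFilterNone;
    case webrtc::kScaleBilinear:
      return libyuv::kFilterBilinear;
    case webrtc::kScaleBox:
    default:
      return libyuv::kFilterBox;
  }
}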

common_video/libyuv/test/test_util.cc

@@ -0,0 +1,97 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_video/libyuv/test/test_util.h"
#include <math.h>
#include <stdio.h>
namespace webrtc {
int PrintFrame(const uint8_t* frame, int width, int height) {
if (frame == NULL)
return -1;
int k = 0;
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
printf("%d ", frame[k++]);
}
printf(" \n");
}
printf(" \n");
return 0;
}
int PrintFrame(const uint8_t* frame, int width,
int height, const char* str) {
if (frame == NULL)
return -1;
printf("%s %dx%d \n", str, width, height);
const uint8_t* frame_y = frame;
const uint8_t* frame_u = frame_y + width * height;
const uint8_t* frame_v = frame_u + width * height / 4;
int ret = 0;
ret += PrintFrame(frame_y, width, height);
ret += PrintFrame(frame_u, width / 2, height / 2);
ret += PrintFrame(frame_v, width / 2, height / 2);
return ret;
}
void CreateImage(int width, int height,
uint8_t* frame, int offset,
int height_factor, int width_factor) {
if (frame == NULL)
return;
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
*frame = static_cast<uint8_t>((i + offset) * height_factor
+ j * width_factor);
frame++;
}
}
}
// TODO(mikhal): After updating to the latest libyuv version, use its PSNR tool.
int ImagePSNRfromBuffer(const uint8_t* ref_frame,
const uint8_t* test_frame,
int width, int height, double* YPSNRptr) {
if (height <= 0 || width <= 0 || ref_frame == NULL || test_frame == NULL)
return -1;
// Assumes I420, one frame
double mse = 0.0;
double mse_log_sum = 0.0;
const uint8_t *ref = ref_frame;
const uint8_t *test = test_frame;
mse = 0.0;
// Calculate Y sum-square-difference.
for ( int k = 0; k < width * height; k++ ) {
mse += (test[k] - ref[k]) * (test[k] - ref[k]);
}
// Divide by number of pixels.
mse /= static_cast<double> (width * height);
if (mse == 0) {
*YPSNRptr = 48;
return 0;
}
// Accumulate for total average
mse_log_sum += log10(mse);
*YPSNRptr = 20.0 * log10(255.0) - 10.0 * mse_log_sum;
return 0;
}
} // namespace webrtc
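Note: ImagePSNRfromBuffer() computes PSNR over the Y plane only, as 20 * log10(255) - 10 * log10(MSE), and returns a capped 48 dB when the frames are identical (MSE of 0); that cap is why the lossless round trips in the unit tests expect exactly 48.0. A tiny standalone restatement of the arithmetic (illustrative only; PsnrFromMse is a made-up name):

#include <math.h>
#include <stdio.h>

// Same arithmetic as ImagePSNRfromBuffer(), including the 48 dB cap.
double PsnrFromMse(double mse) {
  if (mse == 0.0)
    return 48.0;
  return 20.0 * log10(255.0) - 10.0 * log10(mse);
}

int main() {
  printf("identical frames: %.2f dB\n", PsnrFromMse(0.0));  // 48.00
  printf("MSE of 1.0:       %.2f dB\n", PsnrFromMse(1.0));  // ~48.13
  return 0;
}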

common_video/libyuv/test/test_util.h

@@ -0,0 +1,31 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_TEST_TEST_UTIL_H_
#define WEBRTC_COMMON_VIDEO_LIBYUV_TEST_TEST_UTIL_H_
#include "typedefs.h"
namespace webrtc {
int PrintFrame(const uint8_t* frame, int width, int height);
int PrintFrame(const uint8_t* frame, int width, int height, const char* str);
void CreateImage(int width, int height,
uint8_t* frame, int offset,
int height_factor, int width_factor);
int ImagePSNRfromBuffer(const uint8_t *ref_frame,
const uint8_t *test_frame,
int width, int height,
double *YPSNRptr);
} // namespace webrtc
#endif // WEBRTC_COMMON_VIDEO_LIBYUV_TEST_TEST_UTIL_H_

common_video/libyuv/test/unit_test.cc

@@ -0,0 +1,386 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_video/libyuv/test/unit_test.h"
#include <math.h>
#include <string.h>
#include "gtest/gtest.h"
#include "common_video/libyuv/include/libyuv.h"
#include "common_video/libyuv/include/scaler.h"
#include "common_video/libyuv/test/test_util.h"
#include "system_wrappers/interface/tick_util.h"
namespace webrtc {
class LibYuvTest : public ::testing::Test {
protected:
LibYuvTest();
virtual void SetUp();
virtual void TearDown();
FILE* source_file_;
std::string inname_;
const int width_;
const int height_;
const int frame_length_;
};
void ScaleSequence(ScaleMethod method,
FILE* source_file, std::string out_name,
int src_width, int src_height,
int dst_width, int dst_height);
// TODO (mikhal): Update to new test file scheme when available.
// TODO (mikhal): Use scoped_ptr when handling buffers.
LibYuvTest::LibYuvTest()
: source_file_(NULL),
inname_("testFiles/foreman_cif.yuv"),
width_(352),
height_(288),
frame_length_(CalcBufferSize(kI420, 352, 288)) {
}
void LibYuvTest::SetUp() {
source_file_ = fopen(inname_.c_str(), "rb");
ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: " << inname_ << "\n";
}
void LibYuvTest::TearDown() {
if (source_file_ != NULL) {
ASSERT_EQ(0, fclose(source_file_));
}
source_file_ = NULL;
}
TEST_F(LibYuvTest, ConvertSanityTest) {
// TODO(mikhal)
}
TEST_F(LibYuvTest, ScaleSanityTest) {
Scaler test_scaler;
uint8_t* test_buffer = new uint8_t[frame_length_];
// Scaling without setting values
int size = 100;
EXPECT_EQ(-2, test_scaler.Scale(test_buffer, test_buffer, size));
// Setting bad initial values
EXPECT_EQ(-1, test_scaler.Set(0, 288, 352, 288, kI420, kI420, kScalePoint));
EXPECT_EQ(-1, test_scaler.Set(704, 0, 352, 288, kI420, kI420, kScaleBox));
EXPECT_EQ(-1, test_scaler.Set(704, 576, 352, 0, kI420, kI420,
kScaleBilinear));
EXPECT_EQ(-1, test_scaler.Set(704, 576, 0, 288, kI420, kI420, kScalePoint));
// Sending NULL pointer
size = 0;
EXPECT_EQ(-1, test_scaler.Scale(NULL, test_buffer, size));
// Sending a buffer which is too small (should reallocate and update size)
EXPECT_EQ(0, test_scaler.Set(352, 288, 144, 288, kI420, kI420, kScalePoint));
uint8_t* test_buffer2 = NULL;
size = 0;
fread(test_buffer, 1, frame_length_, source_file_);
EXPECT_EQ(0, test_scaler.Scale(test_buffer, test_buffer2, size));
EXPECT_EQ(144 * 288 * 3 / 2, size);
delete [] test_buffer;
delete [] test_buffer2;
}
TEST_F(LibYuvTest, MirrorSanityTest) {
// TODO(mikhal): Look into scoped_ptr for the implementation.
uint8_t* test_buffer1 = new uint8_t[frame_length_];
uint8_t* test_buffer2 = new uint8_t[frame_length_];
// Bad dimensions and NULL pointers
EXPECT_EQ(-1, MirrorI420LeftRight(test_buffer1, test_buffer2, width_, -30));
EXPECT_EQ(-1, MirrorI420LeftRight(test_buffer1, test_buffer2, -352, height_));
EXPECT_EQ(-1, MirrorI420LeftRight(NULL, test_buffer2, width_, height_));
EXPECT_EQ(-1, MirrorI420LeftRight(test_buffer1, NULL, width_, height_));
delete [] test_buffer1;
delete [] test_buffer2;
}
TEST_F(LibYuvTest, ConvertTest) {
// Reading YUV frame - testing on the first frame of the foreman sequence
int j = 0;
// TODO (mikhal): move to correct output path.
std::string out_name = "conversionTest_out.yuv";
FILE* output_file;
double psnr = 0;
output_file = fopen(out_name.c_str(), "wb");
ASSERT_TRUE(output_file != NULL);
uint8_t* orig_buffer = new uint8_t[frame_length_];
fread(orig_buffer, 1, frame_length_, source_file_);
// printf("\nConvert #%d I420 <-> RGB24\n", j);
uint8_t* res_rgb_buffer2 = new uint8_t[width_ * height_ * 3];
uint8_t* res_i420_buffer = new uint8_t[frame_length_];
EXPECT_EQ(0, ConvertFromI420(kRGB24, orig_buffer, width_, height_,
res_rgb_buffer2, false, kRotateNone));
EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2, width_, height_,
res_i420_buffer, false, kRotateNone));
fwrite(res_i420_buffer, frame_length_, 1, output_file);
ImagePSNRfromBuffer(orig_buffer, res_i420_buffer, width_, height_, &psnr);
// Optimization: speed-quality trade-off => 45 dB only.
EXPECT_EQ(45.0, ceil(psnr));
j++;
delete [] res_rgb_buffer2;
// printf("\nConvert #%d I420 <-> UYVY\n", j);
uint8_t* out_uyvy_buffer = new uint8_t[width_ * height_ * 2];
EXPECT_EQ(0, ConvertFromI420(kUYVY, orig_buffer, width_,
height_, out_uyvy_buffer, false, kRotateNone));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer, width_, height_,
res_i420_buffer, false, kRotateNone));
ImagePSNRfromBuffer(orig_buffer, res_i420_buffer, width_, height_, &psnr);
EXPECT_EQ(48.0, psnr);
fwrite(res_i420_buffer, frame_length_, 1, output_file);
j++;
delete [] out_uyvy_buffer;
// printf("\nConvert #%d I420 <-> I420 \n", j);
uint8_t* out_i420_buffer = new uint8_t[width_ * height_ * 2];
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, width_, height_,
out_i420_buffer, false, kRotateNone));
EXPECT_EQ(0, ConvertToI420(kI420 , out_i420_buffer, width_, height_,
res_i420_buffer, false, kRotateNone));
fwrite(res_i420_buffer, frame_length_, 1, output_file);
ImagePSNRfromBuffer(orig_buffer, res_i420_buffer, width_, height_, &psnr);
EXPECT_EQ(48.0, psnr);
j++;
delete [] out_i420_buffer;
// printf("\nConvert #%d I420 <-> YV12\n", j);
uint8_t* outYV120Buffer = new uint8_t[frame_length_];
EXPECT_EQ(0, ConvertFromI420(kYV12, orig_buffer, width_, height_,
outYV120Buffer, false, kRotateNone));
EXPECT_EQ(0, ConvertYV12ToI420(outYV120Buffer, width_, height_,
res_i420_buffer));
fwrite(res_i420_buffer, frame_length_, 1, output_file);
ImagePSNRfromBuffer(orig_buffer, res_i420_buffer, width_, height_, &psnr);
EXPECT_EQ(48.0, psnr);
j++;
delete [] outYV120Buffer;
// printf("\nTEST #%d I420 <-> YUY2\n", j);
uint8_t* out_yuy2_buffer = new uint8_t[width_ * height_ * 2];
EXPECT_EQ(0, ConvertFromI420(kYUY2, orig_buffer, width_, height_,
out_yuy2_buffer, false, kRotateNone));
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer, width_, height_,
res_i420_buffer, false, kRotateNone));
fwrite(res_i420_buffer, frame_length_, 1, output_file);
ImagePSNRfromBuffer(orig_buffer, res_i420_buffer, width_, height_, &psnr);
EXPECT_EQ(48.0, psnr);
delete [] out_yuy2_buffer;
delete [] res_i420_buffer;
delete [] orig_buffer;
}
// TODO(mikhal): Converge the tests into one function that accepts the method.
TEST_F(LibYuvTest, PointScaleTest) {
ScaleMethod method = kScalePoint;
// TODO (mikhal): use webrtc::test::OutputPath()
std::string out_name = "PointScaleTest_176_144.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ / 2, height_ / 2);
out_name = "PointScaleTest_320_240.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
320, 240);
out_name = "PointScaleTest_704_576.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ * 2, height_ * 2);
out_name = "PointScaleTest_300_200.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
300, 200);
out_name = "PointScaleTest_400_300.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
400, 300);
}
TEST_F(LibYuvTest, BiLinearScaleTest) {
ScaleMethod method = kScaleBilinear;
std::string out_name = "BilinearScaleTest_176_144.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ / 2, height_ / 2);
out_name = "BilinearScaleTest_320_240.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
320, 240);
out_name = "BilinearScaleTest_704_576.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ * 2, height_ * 2);
out_name = "BilinearScaleTest_300_200.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
300, 200);
out_name = "BilinearScaleTest_400_300.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
400, 300);
}
TEST_F(LibYuvTest, BoxScaleTest) {
ScaleMethod method = kScaleBox;
std::string out_name = "BoxScaleTest_176_144.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ / 2, height_ / 2);
out_name = "BoxScaleTest_320_240.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
320, 240);
out_name = "BoxScaleTest_704_576.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ * 2, height_ * 2);
out_name = "BoxScaleTest_300_200.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
300, 200);
out_name = "BoxScaleTest_400_300.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
400, 300);
}
TEST_F(LibYuvTest, MirrorTest) {
// TODO (mikhal): Add an automated test to confirm output.
std::string str;
int width = 16;
int height = 8;
int factor_y = 1;
int factor_u = 1;
int factor_v = 1;
int start_buffer_offset = 10;
int length = webrtc::CalcBufferSize(kI420, width, height);
uint8_t* test_frame = new uint8_t[length];
memset(test_frame, 255, length);
// Create input frame
uint8_t* in_frame = test_frame;
uint8_t* in_frame_cb = in_frame + width * height;
uint8_t* in_frame_cr = in_frame_cb + (width * height) / 4;
CreateImage(width, height, in_frame, 10, factor_y, 1); // Y
CreateImage(width / 2, height / 2, in_frame_cb, 100, factor_u, 1); // Cb
CreateImage(width / 2, height / 2, in_frame_cr, 200, factor_v, 1); // Cr
EXPECT_EQ(0, PrintFrame(test_frame, width, height, "InputFrame"));
uint8_t* test_frame2 = new uint8_t[length + start_buffer_offset * 2];
memset(test_frame2, 255, length + start_buffer_offset * 2);
uint8_t* out_frame = test_frame2;
// LeftRight
std::cout << "Test Mirror function: LeftRight" << std::endl;
EXPECT_EQ(0, MirrorI420LeftRight(in_frame, out_frame, width, height));
EXPECT_EQ(0, PrintFrame(test_frame2, width, height, "OutputFrame"));
EXPECT_EQ(0, MirrorI420LeftRight(out_frame, test_frame, width, height));
EXPECT_EQ(0, memcmp(in_frame, test_frame, length));
// UpDown
std::cout << "Test Mirror function: UpDown" << std::endl;
EXPECT_EQ(0, MirrorI420UpDown(in_frame, out_frame, width, height));
EXPECT_EQ(0, PrintFrame(test_frame2, width, height, "OutputFrame"));
EXPECT_EQ(0, MirrorI420UpDown(out_frame, test_frame, width, height));
EXPECT_EQ(0, memcmp(in_frame, test_frame, length));
// TODO(mikhal): Write to a file, and ask to look at the file.
std::cout << "Do the mirrored frames look correct?" << std::endl;
delete [] test_frame;
delete [] test_frame2;
}
// TODO (mikhal): Move part to a separate scale test.
void ScaleSequence(ScaleMethod method,
FILE* source_file, std::string out_name,
int src_width, int src_height,
int dst_width, int dst_height) {
Scaler test_scaler;
FILE* output_file;
EXPECT_EQ(0, test_scaler.Set(src_width, src_height,
dst_width, dst_height,
kI420, kI420, method));
output_file = fopen(out_name.c_str(), "wb");
ASSERT_TRUE(output_file != NULL);
rewind(source_file);
int out_required_size = dst_width * dst_height * 3 / 2;
int in_required_size = src_height * src_width * 3 / 2;
uint8_t* input_buffer = new uint8_t[in_required_size];
uint8_t* output_buffer = new uint8_t[out_required_size];
int64_t start_clock, total_clock;
total_clock = 0;
int frame_count = 0;
// Running through entire sequence
while (feof(source_file) == 0) {
if ((size_t)in_required_size !=
fread(input_buffer, 1, in_required_size, source_file))
break;
start_clock = TickTime::MillisecondTimestamp();
EXPECT_EQ(0, test_scaler.Scale(input_buffer, output_buffer,
out_required_size));
total_clock += TickTime::MillisecondTimestamp() - start_clock;
fwrite(output_buffer, out_required_size, 1, output_file);
frame_count++;
}
if (frame_count) {
printf("Scaling[%d %d] => [%d %d]: ",
src_width, src_height, dst_width, dst_height);
printf("Average time per frame[ms]: %.2lf\n",
(static_cast<double>(total_clock) / frame_count));
}
delete [] input_buffer;
delete [] output_buffer;
}
} // namespace webrtc
