Move matrix from VideoFrame to TextureBuffer.

Previously, the matrix in VideoFrame was used to crop and scale the
frame. This caused complications because webrtc::VideoFrame doesn't
include a matrix. A cropAndScale method is added to the VideoFrame.Buffer
interface for cropping and scaling instead.

BUG=webrtc:7749, webrtc:7760

Review-Url: https://codereview.webrtc.org/2990583002
Cr-Commit-Position: refs/heads/master@{#19179}
This commit is contained in:
sakal
2017-07-28 07:12:23 -07:00
committed by Commit Bot
parent 5ba9730265
commit 836f60cda1
15 changed files with 369 additions and 257 deletions

View File

@ -417,12 +417,11 @@ class HardwareVideoDecoder
@Override
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
VideoFrame.TextureBuffer oesBuffer = surfaceTextureHelper.createTextureBuffer(
renderedTextureMetadata.width, renderedTextureMetadata.height, transformMatrix);
Matrix matrix = RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix);
renderedTextureMetadata.width, renderedTextureMetadata.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
VideoFrame frame = new VideoFrame(oesBuffer, renderedTextureMetadata.rotation,
renderedTextureMetadata.presentationTimestampUs * 1000, matrix);
renderedTextureMetadata.presentationTimestampUs * 1000);
callback.onDecodedFrame(frame, renderedTextureMetadata.decodeTimeMs, null /* qp */);
frame.release();
}
@ -477,7 +476,7 @@ class HardwareVideoDecoder
}
long presentationTimeNs = info.presentationTimeUs * 1000;
VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs, new Matrix());
VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
// Note that qp is parsed on the C++ side.
callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
@ -605,9 +604,9 @@ class HardwareVideoDecoder
activeOutputBuffers++;
}
I420BufferImpl.ReleaseCallback callback = new I420BufferImpl.ReleaseCallback() {
Runnable callback = new Runnable() {
@Override
public void onRelease() {
public void run() {
codec.releaseOutputBuffer(outputBufferIndex, false);
synchronized (activeOutputBuffersLock) {
activeOutputBuffers--;
@ -616,8 +615,20 @@ class HardwareVideoDecoder
}
};
buffer.position(yPos);
buffer.limit(uPos);
ByteBuffer dataY = buffer.slice();
buffer.position(uPos);
buffer.limit(vPos);
ByteBuffer dataU = buffer.slice();
buffer.position(vPos);
buffer.limit(vPos + uvStride * sliceHeight / 2);
ByteBuffer dataV = buffer.slice();
return new I420BufferImpl(
buffer, width, height, yPos, stride, uPos, uvStride, vPos, uvStride, callback);
width, height, dataY, stride, dataU, uvStride, dataV, uvStride, callback);
}
private static void copyI420(ByteBuffer src, int offset, VideoFrame.I420Buffer frameBuffer,

View File

@ -235,8 +235,8 @@ class HardwareVideoEncoder implements VideoEncoder {
}
// If input resolution changed, restart the codec with the new resolution.
int frameWidth = videoFrame.getWidth();
int frameHeight = videoFrame.getHeight();
int frameWidth = videoFrame.getBuffer().getWidth();
int frameHeight = videoFrame.getBuffer().getHeight();
if (frameWidth != width || frameHeight != height) {
VideoCodecStatus status = resetCodec(frameWidth, frameHeight);
if (status != VideoCodecStatus.OK) {
@ -271,8 +271,8 @@ class HardwareVideoEncoder implements VideoEncoder {
EncodedImage.Builder builder = EncodedImage.builder()
.setCaptureTimeMs(presentationTimestampMs)
.setCompleteFrame(true)
.setEncodedWidth(videoFrame.getWidth())
.setEncodedHeight(videoFrame.getHeight())
.setEncodedWidth(videoFrame.getBuffer().getWidth())
.setEncodedHeight(videoFrame.getBuffer().getHeight())
.setRotation(videoFrame.getRotation());
outputBuilders.offer(builder);
@ -293,7 +293,7 @@ class HardwareVideoEncoder implements VideoEncoder {
private VideoCodecStatus encodeTextureBuffer(
VideoFrame videoFrame, VideoFrame.TextureBuffer textureBuffer) {
Matrix matrix = videoFrame.getTransformMatrix();
Matrix matrix = textureBuffer.getTransformMatrix();
float[] transformationMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix);
try {

View File

@ -15,32 +15,28 @@ import org.webrtc.VideoFrame.I420Buffer;
/** Implementation of an I420 VideoFrame buffer. */
class I420BufferImpl implements VideoFrame.I420Buffer {
private final ByteBuffer buffer;
private final int width;
private final int height;
private final int chromaHeight;
private final int yPos;
private final ByteBuffer dataY;
private final ByteBuffer dataU;
private final ByteBuffer dataV;
private final int strideY;
private final int uPos;
private final int strideU;
private final int vPos;
private final int strideV;
private final ReleaseCallback releaseCallback;
private final Runnable releaseCallback;
private int refCount;
/** Allocates an I420Buffer backed by existing data. */
I420BufferImpl(ByteBuffer buffer, int width, int height, int yPos, int strideY, int uPos,
int strideU, int vPos, int strideV, ReleaseCallback releaseCallback) {
this.buffer = buffer;
/** Constructs an I420Buffer backed by existing data. */
I420BufferImpl(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
int strideU, ByteBuffer dataV, int strideV, Runnable releaseCallback) {
this.width = width;
this.height = height;
this.chromaHeight = (height + 1) / 2;
this.yPos = yPos;
this.dataY = dataY;
this.dataU = dataU;
this.dataV = dataV;
this.strideY = strideY;
this.uPos = uPos;
this.strideU = strideU;
this.vPos = vPos;
this.strideV = strideV;
this.releaseCallback = releaseCallback;
@ -54,9 +50,22 @@ class I420BufferImpl implements VideoFrame.I420Buffer {
int yPos = 0;
int uPos = yPos + width * height;
int vPos = uPos + strideUV * chromaHeight;
ByteBuffer buffer = ByteBuffer.allocateDirect(width * height + 2 * strideUV * chromaHeight);
return new I420BufferImpl(
buffer, width, height, yPos, width, uPos, strideUV, vPos, strideUV, null);
buffer.position(yPos);
buffer.limit(uPos);
ByteBuffer dataY = buffer.slice();
buffer.position(uPos);
buffer.limit(vPos);
ByteBuffer dataU = buffer.slice();
buffer.position(vPos);
buffer.limit(vPos + strideUV * chromaHeight);
ByteBuffer dataV = buffer.slice();
return new I420BufferImpl(width, height, dataY, width, dataU, strideUV, dataV, strideUV, null);
}
@Override
@ -71,26 +80,17 @@ class I420BufferImpl implements VideoFrame.I420Buffer {
@Override
public ByteBuffer getDataY() {
ByteBuffer data = buffer.slice();
data.position(yPos);
data.limit(yPos + getStrideY() * height);
return data;
return dataY;
}
@Override
public ByteBuffer getDataU() {
ByteBuffer data = buffer.slice();
data.position(uPos);
data.limit(uPos + strideU * chromaHeight);
return data;
return dataU;
}
@Override
public ByteBuffer getDataV() {
ByteBuffer data = buffer.slice();
data.position(vPos);
data.limit(vPos + strideV * chromaHeight);
return data;
return dataV;
}
@Override
@ -121,13 +121,14 @@ class I420BufferImpl implements VideoFrame.I420Buffer {
@Override
public void release() {
if (--refCount == 0 && releaseCallback != null) {
releaseCallback.onRelease();
releaseCallback.run();
}
}
// Callback called when the frame is no longer referenced.
interface ReleaseCallback {
// Called when the frame is no longer referenced.
void onRelease();
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
return VideoFrame.cropAndScaleI420(
this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
}
}

View File

@ -0,0 +1,131 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Matrix;
import java.nio.ByteBuffer;
/**
* Android texture buffer backed by a SurfaceTextureHelper's texture. The buffer calls
* |releaseCallback| when it is released.
*/
/**
 * Android texture buffer backed by a SurfaceTextureHelper's texture. The buffer calls
 * |releaseCallback| when it is released. Reference counting starts at 1 for the creator.
 */
class TextureBufferImpl implements VideoFrame.TextureBuffer {
  private final int width;
  private final int height;
  private final Type type;
  private final int id;
  // Maps the unit square of texture coordinates onto the valid region of the texture.
  private final Matrix transformMatrix;
  // Used for texture-to-YUV conversion in toI420().
  private final SurfaceTextureHelper surfaceTextureHelper;
  private final Runnable releaseCallback;
  private int refCount;

  /**
   * @param width Frame width in pixels.
   * @param height Frame height in pixels.
   * @param type OES or RGB texture type.
   * @param id OpenGL texture id.
   * @param transformMatrix Texture coordinate transform (crop/rotation) for this buffer.
   * @param surfaceTextureHelper Helper used to convert the texture to I420.
   * @param releaseCallback Run exactly once when the reference count reaches zero.
   */
  public TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix,
      SurfaceTextureHelper surfaceTextureHelper, Runnable releaseCallback) {
    this.width = width;
    this.height = height;
    this.type = type;
    this.id = id;
    this.transformMatrix = transformMatrix;
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.releaseCallback = releaseCallback;
    this.refCount = 1; // Creator implicitly holds a reference.
  }

  @Override
  public VideoFrame.TextureBuffer.Type getType() {
    return type;
  }

  @Override
  public int getTextureId() {
    return id;
  }

  @Override
  public Matrix getTransformMatrix() {
    return transformMatrix;
  }

  @Override
  public int getWidth() {
    return width;
  }

  @Override
  public int getHeight() {
    return height;
  }

  @Override
  public VideoFrame.I420Buffer toI420() {
    // SurfaceTextureHelper requires a stride that is divisible by 8. Round width up.
    // See SurfaceTextureHelper for details on the size and format.
    int stride = ((width + 7) / 8) * 8;
    int uvHeight = (height + 1) / 2;
    // Due to the layout used by SurfaceTextureHelper, vPos + stride * uvHeight would overrun the
    // buffer. Add one row at the bottom to compensate for this. There will never be data in the
    // extra row, but now other code does not have to deal with v stride * v height exceeding the
    // buffer's capacity.
    int size = stride * (height + uvHeight + 1);
    ByteBuffer buffer = ByteBuffer.allocateDirect(size);
    surfaceTextureHelper.textureToYUV(buffer, width, height, stride, id,
        RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transformMatrix));

    int yPos = 0;
    int uPos = yPos + stride * height;
    // Rows of U and V alternate in the buffer, so V data starts after the first row of U.
    int vPos = uPos + stride / 2;

    // Slice out views over the Y, U, and V planes.
    buffer.position(yPos);
    buffer.limit(yPos + stride * height);
    ByteBuffer dataY = buffer.slice();

    buffer.position(uPos);
    buffer.limit(uPos + stride * uvHeight);
    ByteBuffer dataU = buffer.slice();

    buffer.position(vPos);
    buffer.limit(vPos + stride * uvHeight);
    ByteBuffer dataV = buffer.slice();

    // SurfaceTextureHelper uses the same stride for Y, U, and V data.
    return new I420BufferImpl(width, height, dataY, stride, dataU, stride, dataV, stride, null);
  }

  @Override
  public void retain() {
    ++refCount;
  }

  @Override
  public void release() {
    if (--refCount == 0) {
      releaseCallback.run();
    }
  }

  @Override
  public VideoFrame.Buffer cropAndScale(
      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
    retain();
    Matrix newMatrix = new Matrix(transformMatrix);
    // The crop/scale must be applied to the input texture coordinates BEFORE the existing
    // transform, i.e. newMatrix = transformMatrix * translate * scale. Android Matrix pre-ops
    // prepend, so use preTranslate followed by preScale. (Using postScale/postTranslate would
    // append the crop AFTER the existing transform, sampling the wrong region.)
    newMatrix.preTranslate(cropX / (float) width, cropY / (float) height);
    newMatrix.preScale(cropWidth / (float) width, cropHeight / (float) height);

    return new TextureBufferImpl(
        scaleWidth, scaleHeight, type, id, newMatrix, surfaceTextureHelper, new Runnable() {
          @Override
          public void run() {
            // Release the reference taken by retain() above once the cropped buffer is done.
            release();
          }
        });
  }
}

View File

@ -94,6 +94,13 @@ class WrappedNativeI420Buffer implements VideoFrame.I420Buffer {
nativeRelease(nativeBuffer);
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
return VideoFrame.cropAndScaleI420(
this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
}
private static native long nativeAddRef(long nativeBuffer);
private static native long nativeRelease(long nativeBuffer);
}

View File

@ -34,42 +34,6 @@ Matrix::Matrix(JNIEnv* jni, jfloatArray a) {
jni->ReleaseFloatArrayElements(a, ptr, 0);
}
Matrix Matrix::fromAndroidGraphicsMatrix(JNIEnv* jni, jobject j_matrix) {
jfloatArray array_3x3 = jni->NewFloatArray(9);
jclass j_matrix_class = jni->FindClass("android/graphics/Matrix");
jni->CallVoidMethod(j_matrix,
GetMethodID(jni, j_matrix_class, "getValues", "([F)V"),
array_3x3);
jfloat* array_3x3_ptr = jni->GetFloatArrayElements(array_3x3, nullptr);
Matrix matrix;
memset(matrix.elem_, 0, sizeof(matrix.elem_));
// The android.graphics.Matrix looks like this:
// [x1 y1 w1]
// [x2 y2 w2]
// [x3 y3 w3]
// We want to construct a matrix that looks like this:
// [x1 y1 0 w1]
// [x2 y2 0 w2]
// [ 0 0 1 0]
// [x3 y3 0 w3]
// Since it is stored in column-major order, it looks like this:
// [x1 x2 0 x3
// y1 y2 0 y3
// 0 0 1 0
// w1 w2 0 w3]
matrix.elem_[0 * 4 + 0] = array_3x3_ptr[0 * 3 + 0];
matrix.elem_[0 * 4 + 1] = array_3x3_ptr[1 * 3 + 0];
matrix.elem_[0 * 4 + 3] = array_3x3_ptr[2 * 3 + 0];
matrix.elem_[1 * 4 + 0] = array_3x3_ptr[0 * 3 + 1];
matrix.elem_[1 * 4 + 1] = array_3x3_ptr[1 * 3 + 1];
matrix.elem_[1 * 4 + 3] = array_3x3_ptr[2 * 3 + 1];
matrix.elem_[2 * 4 + 2] = 1; // Z-scale should be 1.
matrix.elem_[3 * 4 + 0] = array_3x3_ptr[0 * 3 + 2];
matrix.elem_[3 * 4 + 1] = array_3x3_ptr[1 * 3 + 2];
matrix.elem_[3 * 4 + 3] = array_3x3_ptr[2 * 3 + 2];
return matrix;
}
jfloatArray Matrix::ToJava(JNIEnv* jni) const {
jfloatArray matrix = jni->NewFloatArray(16);
jni->SetFloatArrayRegion(matrix, 0, 16, elem_);
@ -237,12 +201,10 @@ AndroidVideoBuffer::AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
const Matrix& matrix,
jobject j_video_frame_buffer)
: j_release_id_(j_release_id),
width_(width),
height_(height),
matrix_(matrix),
j_video_frame_buffer_(jni, j_video_frame_buffer) {
jni->CallVoidMethod(j_video_frame_buffer, j_retain_id);
}
@ -274,23 +236,19 @@ rtc::scoped_refptr<webrtc::I420BufferInterface> AndroidVideoBuffer::ToI420() {
}
jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni,
int width,
int height,
int rotation) {
jclass j_byte_buffer_class = jni->FindClass("java/nio/ByteBuffer");
jclass j_i420_frame_class =
FindClass(jni, "org/webrtc/VideoRenderer$I420Frame");
jmethodID j_i420_frame_ctor_id =
GetMethodID(jni, j_i420_frame_class, "<init>",
"(III[FLorg/webrtc/VideoFrame$Buffer;J)V");
jmethodID j_i420_frame_ctor_id = GetMethodID(
jni, j_i420_frame_class, "<init>", "(ILorg/webrtc/VideoFrame$Buffer;J)V");
// Java code just uses the native frame to hold a reference to the buffer so
// this is okay.
webrtc::VideoFrame* native_frame = new webrtc::VideoFrame(
this, 0 /* timestamp */, 0 /* render_time_ms */,
webrtc::VideoRotation::kVideoRotation_0 /* rotation */);
return jni->NewObject(j_i420_frame_class, j_i420_frame_ctor_id, width, height,
rotation, matrix_.ToJava(jni), *j_video_frame_buffer_,
jlongFromPointer(native_frame));
return jni->NewObject(j_i420_frame_class, j_i420_frame_ctor_id, rotation,
*j_video_frame_buffer_, jlongFromPointer(native_frame));
}
AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni)
@ -299,16 +257,8 @@ AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni)
*j_video_frame_class_,
"getBuffer",
"()Lorg/webrtc/VideoFrame$Buffer;")),
j_get_width_id_(
GetMethodID(jni, *j_video_frame_class_, "getWidth", "()I")),
j_get_height_id_(
GetMethodID(jni, *j_video_frame_class_, "getHeight", "()I")),
j_get_rotation_id_(
GetMethodID(jni, *j_video_frame_class_, "getRotation", "()I")),
j_get_transform_matrix_id_(GetMethodID(jni,
*j_video_frame_class_,
"getTransformMatrix",
"()Landroid/graphics/Matrix;")),
j_get_timestamp_ns_id_(
GetMethodID(jni, *j_video_frame_class_, "getTimestampNs", "()J")),
j_video_frame_buffer_class_(
@ -317,7 +267,11 @@ AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni)
j_retain_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "retain", "()V")),
j_release_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "release", "()V")) {}
GetMethodID(jni, *j_video_frame_buffer_class_, "release", "()V")),
j_get_width_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "getWidth", "()I")),
j_get_height_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "getHeight", "()I")) {}
webrtc::VideoFrame AndroidVideoBufferFactory::CreateFrame(
JNIEnv* jni,
@ -325,30 +279,23 @@ webrtc::VideoFrame AndroidVideoBufferFactory::CreateFrame(
uint32_t timestamp_rtp) const {
jobject j_video_frame_buffer =
jni->CallObjectMethod(j_video_frame, j_get_buffer_id_);
int width = jni->CallIntMethod(j_video_frame, j_get_width_id_);
int height = jni->CallIntMethod(j_video_frame, j_get_height_id_);
int rotation = jni->CallIntMethod(j_video_frame, j_get_rotation_id_);
jobject j_matrix =
jni->CallObjectMethod(j_video_frame, j_get_transform_matrix_id_);
Matrix matrix = Matrix::fromAndroidGraphicsMatrix(jni, j_matrix);
uint32_t timestamp_ns =
jni->CallLongMethod(j_video_frame, j_get_timestamp_ns_id_);
rtc::scoped_refptr<AndroidVideoBuffer> buffer =
CreateBuffer(width, height, matrix, j_video_frame_buffer);
CreateBuffer(j_video_frame_buffer);
return webrtc::VideoFrame(buffer, timestamp_rtp,
timestamp_ns / rtc::kNumNanosecsPerMillisec,
static_cast<webrtc::VideoRotation>(rotation));
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::CreateBuffer(
int width,
int height,
const Matrix& matrix,
jobject j_video_frame_buffer) const {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_);
int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_);
return new rtc::RefCountedObject<AndroidVideoBuffer>(
jni, j_retain_id_, j_release_id_, width, height, matrix,
j_video_frame_buffer);
jni, j_retain_id_, j_release_id_, width, height, j_video_frame_buffer);
}
} // namespace webrtc_jni

View File

@ -108,14 +108,13 @@ class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
jmethodID j_release_id,
int width,
int height,
const Matrix& matrix,
jobject j_video_frame_buffer);
~AndroidVideoBuffer() override;
jobject video_frame_buffer() const;
// Returns an instance of VideoRenderer.I420Frame (deprecated)
jobject ToJavaI420Frame(JNIEnv* jni, int width, int height, int rotation);
jobject ToJavaI420Frame(JNIEnv* jni, int rotation);
private:
Type type() const override;
@ -129,7 +128,6 @@ class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
const jmethodID j_release_id_;
const int width_;
const int height_;
const Matrix matrix_;
// Holds a VideoFrame.Buffer.
ScopedGlobalRef<jobject> j_video_frame_buffer_;
};
@ -143,23 +141,19 @@ class AndroidVideoBufferFactory {
uint32_t timestamp_rtp) const;
rtc::scoped_refptr<AndroidVideoBuffer> CreateBuffer(
int width,
int height,
const Matrix& matrix,
jobject j_video_frame_buffer) const;
private:
ScopedGlobalRef<jclass> j_video_frame_class_;
jmethodID j_get_buffer_id_;
jmethodID j_get_width_id_;
jmethodID j_get_height_id_;
jmethodID j_get_rotation_id_;
jmethodID j_get_transform_matrix_id_;
jmethodID j_get_timestamp_ns_id_;
ScopedGlobalRef<jclass> j_video_frame_buffer_class_;
jmethodID j_retain_id_;
jmethodID j_release_id_;
jmethodID j_get_width_id_;
jmethodID j_get_height_id_;
};
} // namespace webrtc_jni

View File

@ -59,9 +59,7 @@ class JavaVideoRendererWrapper
break;
case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
j_frame = static_cast<AndroidVideoBuffer*>(android_buffer)
->ToJavaI420Frame(jni(), video_frame.width(),
video_frame.height(),
video_frame.rotation());
->ToJavaI420Frame(jni(), video_frame.rotation());
break;
default:
RTC_NOTREACHED();

View File

@ -0,0 +1,62 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "libyuv/scale.h"
#include "webrtc/rtc_base/checks.h"
namespace webrtc_jni {
// JNI entry point for VideoFrame.nativeCropAndScaleI420(). Crops the source
// I420 planes (all ByteBuffers must be direct) to the given crop rectangle and
// scales the result into the destination planes using libyuv::I420Scale with
// box filtering. Cropping is done with pointer arithmetic; crop_x/crop_y are
// assumed to be even so the chroma planes stay aligned — TODO confirm callers
// guarantee this.
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_VideoFrame_nativeCropAndScaleI420(JNIEnv* jni,
                                                  jclass,
                                                  jobject j_src_y,
                                                  jint src_stride_y,
                                                  jobject j_src_u,
                                                  jint src_stride_u,
                                                  jobject j_src_v,
                                                  jint src_stride_v,
                                                  jint crop_x,
                                                  jint crop_y,
                                                  jint crop_width,
                                                  jint crop_height,
                                                  jobject j_dst_y,
                                                  jint dst_stride_y,
                                                  jobject j_dst_u,
                                                  jint dst_stride_u,
                                                  jobject j_dst_v,
                                                  jint dst_stride_v,
                                                  jint scale_width,
                                                  jint scale_height) {
  uint8_t const* src_y =
      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_y));
  uint8_t const* src_u =
      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_u));
  uint8_t const* src_v =
      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_v));
  uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
  uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
  uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));

  // Perform cropping using pointer arithmetic. Chroma planes are subsampled
  // 2x2, so the chroma offsets use half the crop coordinates.
  src_y += crop_x + crop_y * src_stride_y;
  src_u += crop_x / 2 + crop_y / 2 * src_stride_u;
  src_v += crop_x / 2 + crop_y / 2 * src_stride_v;

  // I420Scale returns an int error code (0 on success). Storing it in a bool
  // would collapse all nonzero codes to `true` and make the DCHECK compare a
  // bool against 0, losing the actual code.
  int ret = libyuv::I420Scale(
      src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, crop_width,
      crop_height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
      dst_stride_v, scale_width, scale_height, libyuv::kFilterBox);
  RTC_DCHECK_EQ(ret, 0) << "I420Scale failed";
}
} // namespace webrtc_jni