Move matrix from VideoFrame to TextureBuffer.

Previously, the matrix in VideoFrame was used to crop and scale the frame. This caused
complications because webrtc::VideoFrame doesn't include a matrix. A cropAndScale method is
added to the VideoFrame.Buffer interface for cropping and scaling instead.

BUG=webrtc:7749, webrtc:7760
Review-Url: https://codereview.webrtc.org/2990583002
Cr-Commit-Position: refs/heads/master@{#19179}
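For illustration only (not part of the diff below): a minimal sketch of the resulting API surface. The variable names buffer, rotation and timestampNs are assumed for the example.

    // The frame no longer carries a transform matrix.
    VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
    // Texture-backed buffers now expose the matrix themselves.
    if (frame.getBuffer() instanceof VideoFrame.TextureBuffer) {
      Matrix transform = ((VideoFrame.TextureBuffer) frame.getBuffer()).getTransformMatrix();
    }
    // Cropping and scaling is now a capability of the buffer, not of the frame.
    VideoFrame.Buffer cropped = frame.getBuffer().cropAndScale(
        0 /* cropX */, 0 /* cropY */, 640, 360, 320, 180);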
@@ -108,6 +108,7 @@ rtc_static_library("video_jni") {
     "src/jni/videodecoderwrapper.cc",
     "src/jni/videodecoderwrapper.h",
     "src/jni/videofilerenderer_jni.cc",
+    "src/jni/videoframe_jni.cc",
     "src/jni/videotrack_jni.cc",
     "src/jni/wrapped_native_i420_buffer.cc",
     "src/jni/wrapped_native_i420_buffer.h",
@@ -393,8 +394,8 @@ android_library("libjingle_peerconnection_java") {
     "api/org/webrtc/VideoSource.java",
     "api/org/webrtc/VideoTrack.java",
     "src/java/org/webrtc/AndroidVideoTrackSourceObserver.java",
-    "src/java/org/webrtc/BitrateAdjuster.java",
     "src/java/org/webrtc/BaseBitrateAdjuster.java",
+    "src/java/org/webrtc/BitrateAdjuster.java",
     "src/java/org/webrtc/Camera1Session.java",
     "src/java/org/webrtc/Camera2Session.java",
     "src/java/org/webrtc/CameraCapturer.java",
@@ -407,9 +408,10 @@ android_library("libjingle_peerconnection_java") {
     "src/java/org/webrtc/HardwareVideoEncoder.java",
     "src/java/org/webrtc/Histogram.java",
     "src/java/org/webrtc/I420BufferImpl.java",
-    "src/java/org/webrtc/VideoDecoderWrapperCallback.java",
     "src/java/org/webrtc/MediaCodecUtils.java",
+    "src/java/org/webrtc/TextureBufferImpl.java",
     "src/java/org/webrtc/VideoCodecType.java",
+    "src/java/org/webrtc/VideoDecoderWrapperCallback.java",
    "src/java/org/webrtc/WrappedNativeI420Buffer.java",
    "src/java/org/webrtc/YuvConverter.java",
  ]
@@ -10,6 +10,7 @@
 
 package org.webrtc;
 
+import android.graphics.Matrix;
 import android.graphics.SurfaceTexture;
 import android.opengl.GLES11Ext;
 import android.opengl.GLES20;
@@ -288,86 +289,13 @@ public class SurfaceTextureHelper {
    * The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The
    * buffer calls returnTextureFrame() when it is released.
    */
-  public TextureBuffer createTextureBuffer(int width, int height, float[] transformMatrix) {
-    return new OesTextureBuffer(oesTextureId, width, height, transformMatrix, this);
-  }
-
-  /**
-   * Android OES texture buffer backed by a SurfaceTextureHelper's texture. The buffer calls
-   * returnTextureFrame() when it is released.
-   */
-  private static class OesTextureBuffer implements TextureBuffer {
-    private final int id;
-    private final int width;
-    private final int height;
-    private final float[] transformMatrix;
-    private final SurfaceTextureHelper helper;
-    private int refCount;
-
-    OesTextureBuffer(
-        int id, int width, int height, float[] transformMatrix, SurfaceTextureHelper helper) {
-      this.id = id;
-      this.width = width;
-      this.height = height;
-      this.transformMatrix = transformMatrix;
-      this.helper = helper;
-      this.refCount = 1; // Creator implicitly holds a reference.
-    }
-
-    @Override
-    public TextureBuffer.Type getType() {
-      return TextureBuffer.Type.OES;
-    }
-
-    @Override
-    public int getTextureId() {
-      return id;
-    }
-
-    @Override
-    public int getWidth() {
-      return width;
-    }
-
-    @Override
-    public int getHeight() {
-      return height;
-    }
-
-    @Override
-    public I420Buffer toI420() {
-      // SurfaceTextureHelper requires a stride that is divisible by 8. Round width up.
-      // See SurfaceTextureHelper for details on the size and format.
-      int stride = ((width + 7) / 8) * 8;
-      int uvHeight = (height + 1) / 2;
-      // Due to the layout used by SurfaceTextureHelper, vPos + stride * uvHeight would overrun the
-      // buffer. Add one row at the bottom to compensate for this. There will never be data in the
-      // extra row, but now other code does not have to deal with v stride * v height exceeding the
-      // buffer's capacity.
-      int size = stride * (height + uvHeight + 1);
-      ByteBuffer buffer = ByteBuffer.allocateDirect(size);
-      helper.textureToYUV(buffer, width, height, stride, id, transformMatrix);
-
-      int yPos = 0;
-      int uPos = yPos + stride * height;
-      // Rows of U and V alternate in the buffer, so V data starts after the first row of U.
-      int vPos = yPos + stride / 2;
-
-      // SurfaceTextureHelper uses the same stride for Y, U, and V data.
-      return new I420BufferImpl(
-          buffer, width, height, yPos, stride, uPos, stride, vPos, stride, null);
-    }
-
-    @Override
-    public void retain() {
-      ++refCount;
-    }
-
-    @Override
-    public void release() {
-      if (--refCount == 0) {
-        helper.returnTextureFrame();
-      }
-    }
-  }
+  public TextureBuffer createTextureBuffer(int width, int height, Matrix transformMatrix) {
+    return new TextureBufferImpl(
+        width, height, TextureBuffer.Type.OES, oesTextureId, transformMatrix, this, new Runnable() {
+          @Override
+          public void run() {
+            returnTextureFrame();
+          }
+        });
+  }
   }
 }
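Callers that previously passed a float[16] OES sampling matrix now convert it to android.graphics.Matrix first; a sketch mirroring the HardwareVideoDecoder change later in this CL (width and height are assumed to be known to the caller):

    // Inside onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs):
    VideoFrame.TextureBuffer buffer = surfaceTextureHelper.createTextureBuffer(
        width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));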
@@ -44,6 +44,13 @@ public class VideoFrame {
      */
     void retain();
     void release();
+
+    /**
+     * Crops a region defined by |cropx|, |cropY|, |cropWidth| and |cropHeight|. Scales it to size
+     * |scaleWidth| x |scaleHeight|.
+     */
+    Buffer cropAndScale(
+        int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);
   }
 
   /**
@@ -67,24 +74,26 @@ public class VideoFrame {
 
     Type getType();
     int getTextureId();
+
+    /**
+     * Retrieve the transform matrix associated with the frame. This transform matrix maps 2D
+     * homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to
+     * the coordinate that should be used to sample that location from the buffer.
+     */
+    public Matrix getTransformMatrix();
   }
 
   private final Buffer buffer;
   private final int rotation;
   private final long timestampNs;
-  private final Matrix transformMatrix;
 
-  public VideoFrame(Buffer buffer, int rotation, long timestampNs, Matrix transformMatrix) {
+  public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
     if (buffer == null) {
       throw new IllegalArgumentException("buffer not allowed to be null");
     }
-    if (transformMatrix == null) {
-      throw new IllegalArgumentException("transformMatrix not allowed to be null");
-    }
     this.buffer = buffer;
     this.rotation = rotation;
     this.timestampNs = timestampNs;
-    this.transformMatrix = transformMatrix;
   }
 
   public Buffer getBuffer() {
@@ -105,26 +114,6 @@ public class VideoFrame {
     return timestampNs;
   }
 
-  /**
-   * Retrieve the transform matrix associated with the frame. This transform matrix maps 2D
-   * homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to the
-   * coordinate that should be used to sample that location from the buffer.
-   */
-  public Matrix getTransformMatrix() {
-    return transformMatrix;
-  }
-
-  /**
-   * Resolution of the frame in pixels.
-   */
-  public int getWidth() {
-    return buffer.getWidth();
-  }
-
-  public int getHeight() {
-    return buffer.getHeight();
-  }
-
   /**
    * Reference counting of the underlying buffer.
    */
@@ -135,4 +124,41 @@ public class VideoFrame {
   public void release() {
     buffer.release();
   }
+
+  public static VideoFrame.Buffer cropAndScaleI420(final I420Buffer buffer, int cropX, int cropY,
+      int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+    if (cropWidth == scaleWidth && cropHeight == scaleHeight) {
+      // No scaling.
+      ByteBuffer dataY = buffer.getDataY();
+      ByteBuffer dataU = buffer.getDataU();
+      ByteBuffer dataV = buffer.getDataV();
+
+      dataY.position(cropX + cropY * buffer.getStrideY());
+      dataU.position(cropX / 2 + cropY / 2 * buffer.getStrideU());
+      dataV.position(cropX / 2 + cropY / 2 * buffer.getStrideV());
+
+      buffer.retain();
+      return new I420BufferImpl(buffer.getWidth(), buffer.getHeight(), dataY.slice(),
+          buffer.getStrideY(), dataU.slice(), buffer.getStrideU(), dataV.slice(),
+          buffer.getStrideV(), new Runnable() {
+            @Override
+            public void run() {
+              buffer.release();
+            }
+          });
+    }
+
+    I420BufferImpl newBuffer = I420BufferImpl.allocate(scaleWidth, scaleHeight);
+    nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
+        buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
+        cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
+        newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
+        scaleHeight);
+    return newBuffer;
+  }
+
+  private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
+      ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
+      int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
+      int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);
 }
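A usage sketch for the new Buffer.cropAndScale() added above; the helper name and the centered-crop policy are assumptions for illustration, not part of this CL:

  // Returns a buffer holding the centered half-resolution region of |frame|'s buffer.
  // The returned buffer is reference counted independently and must be released by the caller.
  static VideoFrame.Buffer centerCrop(VideoFrame frame) {
    VideoFrame.Buffer buffer = frame.getBuffer();
    int cropWidth = buffer.getWidth() / 2;
    int cropHeight = buffer.getHeight() / 2;
    return buffer.cropAndScale((buffer.getWidth() - cropWidth) / 2,
        (buffer.getHeight() - cropHeight) / 2, cropWidth, cropHeight, cropWidth, cropHeight);
  }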
@@ -84,12 +84,11 @@ public class VideoRenderer {
     }
 
     /**
-     * Construct a frame of the given dimensions from VideoFrame.Buffer.
+     * Construct a frame from VideoFrame.Buffer.
      */
-    public I420Frame(int width, int height, int rotationDegree, float[] samplingMatrix,
-        VideoFrame.Buffer buffer, long nativeFramePointer) {
-      this.width = width;
-      this.height = height;
+    public I420Frame(int rotationDegree, VideoFrame.Buffer buffer, long nativeFramePointer) {
+      this.width = buffer.getWidth();
+      this.height = buffer.getHeight();
       this.rotationDegree = rotationDegree;
       if (rotationDegree % 90 != 0) {
         throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
@@ -98,7 +97,8 @@ public class VideoRenderer {
         VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer;
         this.yuvFrame = false;
         this.textureId = textureBuffer.getTextureId();
-        this.samplingMatrix = samplingMatrix;
+        this.samplingMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(
+            textureBuffer.getTransformMatrix());
 
         this.yuvStrides = null;
         this.yuvPlanes = null;
@@ -113,8 +113,7 @@ public class VideoRenderer {
         // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
         // bottom-left corner. This discrepancy is corrected by multiplying the sampling matrix with
         // a vertical flip matrix.
-        this.samplingMatrix =
-            RendererCommon.multiplyMatrices(samplingMatrix, RendererCommon.verticalFlipMatrix());
+        this.samplingMatrix = RendererCommon.verticalFlipMatrix();
 
         this.textureId = 0;
       }
@@ -18,8 +18,8 @@ import android.annotation.TargetApi;
 import android.graphics.Matrix;
 import android.support.test.filters.MediumTest;
 import android.util.Log;
-import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicReference;
 import org.chromium.base.test.BaseJUnit4ClassRunner;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -123,8 +123,7 @@ public final class HardwareVideoDecoderTest {
 
     // First, encode a frame.
     VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
-    VideoFrame frame =
-        new VideoFrame(buffer, rotation, presentationTimestampUs * 1000, new Matrix());
+    VideoFrame frame = new VideoFrame(buffer, rotation, presentationTimestampUs * 1000);
     VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
         new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
 
@@ -141,9 +140,8 @@ public final class HardwareVideoDecoderTest {
     frame = decoded.get();
     assertEquals(frame.getRotation(), rotation);
     assertEquals(frame.getTimestampNs(), presentationTimestampUs * 1000);
-    assertEquals(frame.getTransformMatrix(), new Matrix());
-    assertEquals(frame.getWidth(), SETTINGS.width);
-    assertEquals(frame.getHeight(), SETTINGS.height);
+    assertEquals(frame.getBuffer().getWidth(), SETTINGS.width);
+    assertEquals(frame.getBuffer().getHeight(), SETTINGS.height);
 
     frame.release();
     assertEquals(decoder.release(), VideoCodecStatus.OK);
@@ -200,8 +198,7 @@ public final class HardwareVideoDecoderTest {
 
     // First, encode a frame.
     VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
-    VideoFrame frame =
-        new VideoFrame(buffer, rotation, presentationTimestampUs * 1000, new Matrix());
+    VideoFrame frame = new VideoFrame(buffer, rotation, presentationTimestampUs * 1000);
     VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
         new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
 
@@ -218,13 +215,13 @@ public final class HardwareVideoDecoderTest {
     frame = decoded.get();
     assertEquals(frame.getRotation(), rotation);
     assertEquals(frame.getTimestampNs(), presentationTimestampUs * 1000);
-    // TODO(mellem): Compare the matrix to whatever we expect to get back?
-    assertNotNull(frame.getTransformMatrix());
-    assertEquals(frame.getWidth(), SETTINGS.width);
-    assertEquals(frame.getHeight(), SETTINGS.height);
 
     assertTrue(frame.getBuffer() instanceof VideoFrame.TextureBuffer);
     VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) frame.getBuffer();
+    // TODO(mellem): Compare the matrix to whatever we expect to get back?
+    assertNotNull(textureBuffer.getTransformMatrix());
+    assertEquals(textureBuffer.getWidth(), SETTINGS.width);
+    assertEquals(textureBuffer.getHeight(), SETTINGS.height);
     assertEquals(textureBuffer.getType(), VideoFrame.TextureBuffer.Type.OES);
 
     assertEquals(decoder.release(), VideoCodecStatus.OK);
@@ -101,8 +101,7 @@ public class HardwareVideoEncoderTest {
     assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
 
     VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
-    VideoFrame frame =
-        new VideoFrame(buffer, 0 /* rotation */, presentationTimestampUs * 1000, new Matrix());
+    VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampUs * 1000);
     VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
         new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
 
@@ -162,6 +161,11 @@ public class HardwareVideoEncoderTest {
         return oesTextureId;
       }
 
+      @Override
+      public Matrix getTransformMatrix() {
+        return new Matrix();
+      }
+
       @Override
       public int getWidth() {
         return SETTINGS.width;
@@ -182,9 +186,14 @@ public class HardwareVideoEncoderTest {
 
       @Override
      public void release() {}
+
+      @Override
+      public VideoFrame.Buffer cropAndScale(
+          int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+        return null;
+      }
     };
-    VideoFrame frame =
-        new VideoFrame(buffer, 0 /* rotation */, presentationTimestampUs * 1000, new Matrix());
+    VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampUs * 1000);
     VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
         new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
 
@@ -417,12 +417,11 @@ class HardwareVideoDecoder
   @Override
   public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
     VideoFrame.TextureBuffer oesBuffer = surfaceTextureHelper.createTextureBuffer(
-        renderedTextureMetadata.width, renderedTextureMetadata.height, transformMatrix);
-
-    Matrix matrix = RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix);
+        renderedTextureMetadata.width, renderedTextureMetadata.height,
+        RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
 
     VideoFrame frame = new VideoFrame(oesBuffer, renderedTextureMetadata.rotation,
-        renderedTextureMetadata.presentationTimestampUs * 1000, matrix);
+        renderedTextureMetadata.presentationTimestampUs * 1000);
     callback.onDecodedFrame(frame, renderedTextureMetadata.decodeTimeMs, null /* qp */);
     frame.release();
   }
@@ -477,7 +476,7 @@ class HardwareVideoDecoder
     }
 
     long presentationTimeNs = info.presentationTimeUs * 1000;
-    VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs, new Matrix());
+    VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
 
     // Note that qp is parsed on the C++ side.
     callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
@@ -605,9 +604,9 @@ class HardwareVideoDecoder
       activeOutputBuffers++;
     }
 
-    I420BufferImpl.ReleaseCallback callback = new I420BufferImpl.ReleaseCallback() {
+    Runnable callback = new Runnable() {
       @Override
-      public void onRelease() {
+      public void run() {
        codec.releaseOutputBuffer(outputBufferIndex, false);
        synchronized (activeOutputBuffersLock) {
          activeOutputBuffers--;
@@ -616,8 +615,20 @@ class HardwareVideoDecoder
       }
     };
 
+    buffer.position(yPos);
+    buffer.limit(uPos);
+    ByteBuffer dataY = buffer.slice();
+
+    buffer.position(uPos);
+    buffer.limit(vPos);
+    ByteBuffer dataU = buffer.slice();
+
+    buffer.position(vPos);
+    buffer.limit(vPos + uvStride * sliceHeight / 2);
+    ByteBuffer dataV = buffer.slice();
+
     return new I420BufferImpl(
-        buffer, width, height, yPos, stride, uPos, uvStride, vPos, uvStride, callback);
+        width, height, dataY, stride, dataU, uvStride, dataV, uvStride, callback);
   }
 
   private static void copyI420(ByteBuffer src, int offset, VideoFrame.I420Buffer frameBuffer,
@@ -235,8 +235,8 @@ class HardwareVideoEncoder implements VideoEncoder {
     }
 
     // If input resolution changed, restart the codec with the new resolution.
-    int frameWidth = videoFrame.getWidth();
-    int frameHeight = videoFrame.getHeight();
+    int frameWidth = videoFrame.getBuffer().getWidth();
+    int frameHeight = videoFrame.getBuffer().getHeight();
     if (frameWidth != width || frameHeight != height) {
       VideoCodecStatus status = resetCodec(frameWidth, frameHeight);
       if (status != VideoCodecStatus.OK) {
@@ -271,8 +271,8 @@ class HardwareVideoEncoder implements VideoEncoder {
     EncodedImage.Builder builder = EncodedImage.builder()
                                        .setCaptureTimeMs(presentationTimestampMs)
                                        .setCompleteFrame(true)
-                                       .setEncodedWidth(videoFrame.getWidth())
-                                       .setEncodedHeight(videoFrame.getHeight())
+                                       .setEncodedWidth(videoFrame.getBuffer().getWidth())
+                                       .setEncodedHeight(videoFrame.getBuffer().getHeight())
                                        .setRotation(videoFrame.getRotation());
     outputBuilders.offer(builder);
 
@@ -293,7 +293,7 @@ class HardwareVideoEncoder implements VideoEncoder {
 
   private VideoCodecStatus encodeTextureBuffer(
       VideoFrame videoFrame, VideoFrame.TextureBuffer textureBuffer) {
-    Matrix matrix = videoFrame.getTransformMatrix();
+    Matrix matrix = textureBuffer.getTransformMatrix();
     float[] transformationMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix);
 
     try {
@@ -15,32 +15,28 @@ import org.webrtc.VideoFrame.I420Buffer;
 
 /** Implementation of an I420 VideoFrame buffer. */
 class I420BufferImpl implements VideoFrame.I420Buffer {
-  private final ByteBuffer buffer;
   private final int width;
   private final int height;
-  private final int chromaHeight;
-  private final int yPos;
+  private final ByteBuffer dataY;
+  private final ByteBuffer dataU;
+  private final ByteBuffer dataV;
   private final int strideY;
-  private final int uPos;
   private final int strideU;
-  private final int vPos;
   private final int strideV;
-  private final ReleaseCallback releaseCallback;
+  private final Runnable releaseCallback;
 
   private int refCount;
 
-  /** Allocates an I420Buffer backed by existing data. */
-  I420BufferImpl(ByteBuffer buffer, int width, int height, int yPos, int strideY, int uPos,
-      int strideU, int vPos, int strideV, ReleaseCallback releaseCallback) {
-    this.buffer = buffer;
+  /** Constructs an I420Buffer backed by existing data. */
+  I420BufferImpl(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
+      int strideU, ByteBuffer dataV, int strideV, Runnable releaseCallback) {
     this.width = width;
     this.height = height;
-    this.chromaHeight = (height + 1) / 2;
-    this.yPos = yPos;
+    this.dataY = dataY;
+    this.dataU = dataU;
+    this.dataV = dataV;
     this.strideY = strideY;
-    this.uPos = uPos;
     this.strideU = strideU;
-    this.vPos = vPos;
     this.strideV = strideV;
     this.releaseCallback = releaseCallback;
 
@@ -54,9 +50,22 @@ class I420BufferImpl implements VideoFrame.I420Buffer {
     int yPos = 0;
     int uPos = yPos + width * height;
     int vPos = uPos + strideUV * chromaHeight;
+
     ByteBuffer buffer = ByteBuffer.allocateDirect(width * height + 2 * strideUV * chromaHeight);
-    return new I420BufferImpl(
-        buffer, width, height, yPos, width, uPos, strideUV, vPos, strideUV, null);
+
+    buffer.position(yPos);
+    buffer.limit(uPos);
+    ByteBuffer dataY = buffer.slice();
+
+    buffer.position(uPos);
+    buffer.limit(vPos);
+    ByteBuffer dataU = buffer.slice();
+
+    buffer.position(vPos);
+    buffer.limit(vPos + strideUV * chromaHeight);
+    ByteBuffer dataV = buffer.slice();
+
+    return new I420BufferImpl(width, height, dataY, width, dataU, strideUV, dataV, strideUV, null);
   }
 
   @Override
@@ -71,26 +80,17 @@ class I420BufferImpl implements VideoFrame.I420Buffer {
 
   @Override
   public ByteBuffer getDataY() {
-    ByteBuffer data = buffer.slice();
-    data.position(yPos);
-    data.limit(yPos + getStrideY() * height);
-    return data;
+    return dataY;
   }
 
   @Override
   public ByteBuffer getDataU() {
-    ByteBuffer data = buffer.slice();
-    data.position(uPos);
-    data.limit(uPos + strideU * chromaHeight);
-    return data;
+    return dataU;
   }
 
   @Override
   public ByteBuffer getDataV() {
-    ByteBuffer data = buffer.slice();
-    data.position(vPos);
-    data.limit(vPos + strideV * chromaHeight);
-    return data;
+    return dataV;
   }
 
   @Override
@@ -121,13 +121,14 @@ class I420BufferImpl implements VideoFrame.I420Buffer {
   @Override
   public void release() {
     if (--refCount == 0 && releaseCallback != null) {
-      releaseCallback.onRelease();
+      releaseCallback.run();
     }
   }
 
-  // Callback called when the frame is no longer referenced.
-  interface ReleaseCallback {
-    // Called when the frame is no longer referenced.
-    void onRelease();
+  @Override
+  public VideoFrame.Buffer cropAndScale(
+      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+    return VideoFrame.cropAndScaleI420(
+        this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
   }
 }
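A sketch of the reworked I420BufferImpl constructor (per-plane ByteBuffers plus a Runnable release callback); the sizes and allocation below are assumptions for illustration, and the class is package-private, so this only compiles inside org.webrtc:

    int width = 640;
    int height = 480;
    int chromaStride = (width + 1) / 2;
    int chromaHeight = (height + 1) / 2;
    ByteBuffer dataY = ByteBuffer.allocateDirect(width * height);
    ByteBuffer dataU = ByteBuffer.allocateDirect(chromaStride * chromaHeight);
    ByteBuffer dataV = ByteBuffer.allocateDirect(chromaStride * chromaHeight);
    VideoFrame.I420Buffer buffer = new I420BufferImpl(width, height, dataY, width, dataU,
        chromaStride, dataV, chromaStride, new Runnable() {
          @Override
          public void run() {
            // Runs once when the last reference is released; free native resources here.
          }
        });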
webrtc/sdk/android/src/java/org/webrtc/TextureBufferImpl.java (new file, 131 lines)
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Matrix;
+import java.nio.ByteBuffer;
+
+/**
+ * Android texture buffer backed by a SurfaceTextureHelper's texture. The buffer calls
+ * |releaseCallback| when it is released.
+ */
+class TextureBufferImpl implements VideoFrame.TextureBuffer {
+  private final int width;
+  private final int height;
+  private final Type type;
+  private final int id;
+  private final Matrix transformMatrix;
+  private final SurfaceTextureHelper surfaceTextureHelper;
+  private final Runnable releaseCallback;
+  private int refCount;
+
+  public TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix,
+      SurfaceTextureHelper surfaceTextureHelper, Runnable releaseCallback) {
+    this.width = width;
+    this.height = height;
+    this.type = type;
+    this.id = id;
+    this.transformMatrix = transformMatrix;
+    this.surfaceTextureHelper = surfaceTextureHelper;
+    this.releaseCallback = releaseCallback;
+    this.refCount = 1; // Creator implicitly holds a reference.
+  }
+
+  @Override
+  public VideoFrame.TextureBuffer.Type getType() {
+    return type;
+  }
+
+  @Override
+  public int getTextureId() {
+    return id;
+  }
+
+  @Override
+  public Matrix getTransformMatrix() {
+    return transformMatrix;
+  }
+
+  @Override
+  public int getWidth() {
+    return width;
+  }
+
+  @Override
+  public int getHeight() {
+    return height;
+  }
+
+  @Override
+  public VideoFrame.I420Buffer toI420() {
+    // SurfaceTextureHelper requires a stride that is divisible by 8. Round width up.
+    // See SurfaceTextureHelper for details on the size and format.
+    int stride = ((width + 7) / 8) * 8;
+    int uvHeight = (height + 1) / 2;
+    // Due to the layout used by SurfaceTextureHelper, vPos + stride * uvHeight would overrun the
+    // buffer. Add one row at the bottom to compensate for this. There will never be data in the
+    // extra row, but now other code does not have to deal with v stride * v height exceeding the
+    // buffer's capacity.
+    int size = stride * (height + uvHeight + 1);
+    ByteBuffer buffer = ByteBuffer.allocateDirect(size);
+    surfaceTextureHelper.textureToYUV(buffer, width, height, stride, id,
+        RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transformMatrix));
+
+    int yPos = 0;
+    int uPos = yPos + stride * height;
+    // Rows of U and V alternate in the buffer, so V data starts after the first row of U.
+    int vPos = uPos + stride / 2;
+
+    buffer.position(yPos);
+    buffer.limit(yPos + stride * height);
+    ByteBuffer dataY = buffer.slice();
+
+    buffer.position(uPos);
+    buffer.limit(uPos + stride * uvHeight);
+    ByteBuffer dataU = buffer.slice();
+
+    buffer.position(vPos);
+    buffer.limit(vPos + stride * uvHeight);
+    ByteBuffer dataV = buffer.slice();
+
+    // SurfaceTextureHelper uses the same stride for Y, U, and V data.
+    return new I420BufferImpl(width, height, dataY, stride, dataU, stride, dataV, stride, null);
+  }
+
+  @Override
+  public void retain() {
+    ++refCount;
+  }
+
+  @Override
+  public void release() {
+    if (--refCount == 0) {
+      releaseCallback.run();
+    }
+  }
+
+  @Override
+  public VideoFrame.Buffer cropAndScale(
+      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+    retain();
+    Matrix newMatrix = new Matrix(transformMatrix);
+    newMatrix.postScale(cropWidth / (float) width, cropHeight / (float) height);
+    newMatrix.postTranslate(cropX / (float) width, cropY / (float) height);
+
+    return new TextureBufferImpl(
+        scaleWidth, scaleHeight, type, id, newMatrix, surfaceTextureHelper, new Runnable() {
+          @Override
+          public void run() {
+            release();
+          }
+        });
+  }
+}
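A sketch of cropAndScale() on a texture buffer: no pixels are copied, the crop and scale are folded into the transform matrix, and the derived buffer keeps the parent alive through reference counting (the helper instance and sizes are assumed):

    VideoFrame.TextureBuffer texture =
        surfaceTextureHelper.createTextureBuffer(1280, 720, new Matrix());
    VideoFrame.Buffer quarter = texture.cropAndScale(0, 0, 1280, 720, 640, 360);
    // Releasing the derived buffer drops the implicit reference it took on |texture|.
    quarter.release();
    texture.release();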
@@ -94,6 +94,13 @@ class WrappedNativeI420Buffer implements VideoFrame.I420Buffer {
     nativeRelease(nativeBuffer);
   }
 
+  @Override
+  public VideoFrame.Buffer cropAndScale(
+      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+    return VideoFrame.cropAndScaleI420(
+        this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+  }
+
   private static native long nativeAddRef(long nativeBuffer);
   private static native long nativeRelease(long nativeBuffer);
 }
@@ -34,42 +34,6 @@ Matrix::Matrix(JNIEnv* jni, jfloatArray a) {
   jni->ReleaseFloatArrayElements(a, ptr, 0);
 }
 
-Matrix Matrix::fromAndroidGraphicsMatrix(JNIEnv* jni, jobject j_matrix) {
-  jfloatArray array_3x3 = jni->NewFloatArray(9);
-  jclass j_matrix_class = jni->FindClass("android/graphics/Matrix");
-  jni->CallVoidMethod(j_matrix,
-                      GetMethodID(jni, j_matrix_class, "getValues", "([F)V"),
-                      array_3x3);
-  jfloat* array_3x3_ptr = jni->GetFloatArrayElements(array_3x3, nullptr);
-  Matrix matrix;
-  memset(matrix.elem_, 0, sizeof(matrix.elem_));
-  // The android.graphics.Matrix looks like this:
-  // [x1 y1 w1]
-  // [x2 y2 w2]
-  // [x3 y3 w3]
-  // We want to contruct a matrix that looks like this:
-  // [x1 y1 0 w1]
-  // [x2 y2 0 w2]
-  // [ 0  0 1  0]
-  // [x3 y3 0 w3]
-  // Since it is stored in column-major order, it looks like this:
-  // [x1 x2 0 x3
-  //  y1 y2 0 y3
-  //   0  0 1  0
-  //  w1 w2 0 w3]
-  matrix.elem_[0 * 4 + 0] = array_3x3_ptr[0 * 3 + 0];
-  matrix.elem_[0 * 4 + 1] = array_3x3_ptr[1 * 3 + 0];
-  matrix.elem_[0 * 4 + 3] = array_3x3_ptr[2 * 3 + 0];
-  matrix.elem_[1 * 4 + 0] = array_3x3_ptr[0 * 3 + 1];
-  matrix.elem_[1 * 4 + 1] = array_3x3_ptr[1 * 3 + 1];
-  matrix.elem_[1 * 4 + 3] = array_3x3_ptr[2 * 3 + 1];
-  matrix.elem_[2 * 4 + 2] = 1;  // Z-scale should be 1.
-  matrix.elem_[3 * 4 + 0] = array_3x3_ptr[0 * 3 + 2];
-  matrix.elem_[3 * 4 + 1] = array_3x3_ptr[1 * 3 + 2];
-  matrix.elem_[3 * 4 + 3] = array_3x3_ptr[2 * 3 + 2];
-  return matrix;
-}
-
 jfloatArray Matrix::ToJava(JNIEnv* jni) const {
   jfloatArray matrix = jni->NewFloatArray(16);
   jni->SetFloatArrayRegion(matrix, 0, 16, elem_);
@@ -237,12 +201,10 @@ AndroidVideoBuffer::AndroidVideoBuffer(JNIEnv* jni,
                                        jmethodID j_release_id,
                                        int width,
                                        int height,
-                                       const Matrix& matrix,
                                        jobject j_video_frame_buffer)
     : j_release_id_(j_release_id),
       width_(width),
       height_(height),
-      matrix_(matrix),
       j_video_frame_buffer_(jni, j_video_frame_buffer) {
   jni->CallVoidMethod(j_video_frame_buffer, j_retain_id);
 }
@@ -274,23 +236,19 @@ rtc::scoped_refptr<webrtc::I420BufferInterface> AndroidVideoBuffer::ToI420() {
 }
 
 jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni,
-                                            int width,
-                                            int height,
                                             int rotation) {
   jclass j_byte_buffer_class = jni->FindClass("java/nio/ByteBuffer");
   jclass j_i420_frame_class =
       FindClass(jni, "org/webrtc/VideoRenderer$I420Frame");
-  jmethodID j_i420_frame_ctor_id =
-      GetMethodID(jni, j_i420_frame_class, "<init>",
-                  "(III[FLorg/webrtc/VideoFrame$Buffer;J)V");
+  jmethodID j_i420_frame_ctor_id = GetMethodID(
+      jni, j_i420_frame_class, "<init>", "(ILorg/webrtc/VideoFrame$Buffer;J)V");
   // Java code just uses the native frame to hold a reference to the buffer so
   // this is okay.
   webrtc::VideoFrame* native_frame = new webrtc::VideoFrame(
       this, 0 /* timestamp */, 0 /* render_time_ms */,
       webrtc::VideoRotation::kVideoRotation_0 /* rotation */);
-  return jni->NewObject(j_i420_frame_class, j_i420_frame_ctor_id, width, height,
-                        rotation, matrix_.ToJava(jni), *j_video_frame_buffer_,
-                        jlongFromPointer(native_frame));
+  return jni->NewObject(j_i420_frame_class, j_i420_frame_ctor_id, rotation,
+                        *j_video_frame_buffer_, jlongFromPointer(native_frame));
 }
 
 AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni)
@@ -299,16 +257,8 @@ AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni)
                        *j_video_frame_class_,
                        "getBuffer",
                        "()Lorg/webrtc/VideoFrame$Buffer;")),
-      j_get_width_id_(
-          GetMethodID(jni, *j_video_frame_class_, "getWidth", "()I")),
-      j_get_height_id_(
-          GetMethodID(jni, *j_video_frame_class_, "getHeight", "()I")),
       j_get_rotation_id_(
           GetMethodID(jni, *j_video_frame_class_, "getRotation", "()I")),
-      j_get_transform_matrix_id_(GetMethodID(jni,
-                                             *j_video_frame_class_,
-                                             "getTransformMatrix",
-                                             "()Landroid/graphics/Matrix;")),
       j_get_timestamp_ns_id_(
          GetMethodID(jni, *j_video_frame_class_, "getTimestampNs", "()J")),
       j_video_frame_buffer_class_(
@@ -317,7 +267,11 @@ AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni)
       j_retain_id_(
          GetMethodID(jni, *j_video_frame_buffer_class_, "retain", "()V")),
       j_release_id_(
-          GetMethodID(jni, *j_video_frame_buffer_class_, "release", "()V")) {}
+          GetMethodID(jni, *j_video_frame_buffer_class_, "release", "()V")),
+      j_get_width_id_(
+          GetMethodID(jni, *j_video_frame_buffer_class_, "getWidth", "()I")),
+      j_get_height_id_(
+          GetMethodID(jni, *j_video_frame_buffer_class_, "getHeight", "()I")) {}
 
 webrtc::VideoFrame AndroidVideoBufferFactory::CreateFrame(
     JNIEnv* jni,
@@ -325,30 +279,23 @@ webrtc::VideoFrame AndroidVideoBufferFactory::CreateFrame(
     uint32_t timestamp_rtp) const {
   jobject j_video_frame_buffer =
       jni->CallObjectMethod(j_video_frame, j_get_buffer_id_);
-  int width = jni->CallIntMethod(j_video_frame, j_get_width_id_);
-  int height = jni->CallIntMethod(j_video_frame, j_get_height_id_);
   int rotation = jni->CallIntMethod(j_video_frame, j_get_rotation_id_);
-  jobject j_matrix =
-      jni->CallObjectMethod(j_video_frame, j_get_transform_matrix_id_);
-  Matrix matrix = Matrix::fromAndroidGraphicsMatrix(jni, j_matrix);
   uint32_t timestamp_ns =
       jni->CallLongMethod(j_video_frame, j_get_timestamp_ns_id_);
   rtc::scoped_refptr<AndroidVideoBuffer> buffer =
-      CreateBuffer(width, height, matrix, j_video_frame_buffer);
+      CreateBuffer(j_video_frame_buffer);
   return webrtc::VideoFrame(buffer, timestamp_rtp,
                             timestamp_ns / rtc::kNumNanosecsPerMillisec,
                             static_cast<webrtc::VideoRotation>(rotation));
 }
 
 rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::CreateBuffer(
-    int width,
-    int height,
-    const Matrix& matrix,
     jobject j_video_frame_buffer) const {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_);
+  int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_);
   return new rtc::RefCountedObject<AndroidVideoBuffer>(
-      jni, j_retain_id_, j_release_id_, width, height, matrix,
-      j_video_frame_buffer);
+      jni, j_retain_id_, j_release_id_, width, height, j_video_frame_buffer);
 }
 
 }  // namespace webrtc_jni
@@ -108,14 +108,13 @@ class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
                      jmethodID j_release_id,
                      int width,
                      int height,
-                     const Matrix& matrix,
                      jobject j_video_frame_buffer);
   ~AndroidVideoBuffer() override;
 
   jobject video_frame_buffer() const;
 
   // Returns an instance of VideoRenderer.I420Frame (deprecated)
-  jobject ToJavaI420Frame(JNIEnv* jni, int width, int height, int rotation);
+  jobject ToJavaI420Frame(JNIEnv* jni, int rotation);
 
  private:
  Type type() const override;
@@ -129,7 +128,6 @@ class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
   const jmethodID j_release_id_;
   const int width_;
   const int height_;
-  const Matrix matrix_;
   // Holds a VideoFrame.Buffer.
   ScopedGlobalRef<jobject> j_video_frame_buffer_;
 };
@@ -143,23 +141,19 @@ class AndroidVideoBufferFactory {
       uint32_t timestamp_rtp) const;
 
   rtc::scoped_refptr<AndroidVideoBuffer> CreateBuffer(
-      int width,
-      int height,
-      const Matrix& matrix,
       jobject j_video_frame_buffer) const;
 
  private:
  ScopedGlobalRef<jclass> j_video_frame_class_;
  jmethodID j_get_buffer_id_;
-  jmethodID j_get_width_id_;
-  jmethodID j_get_height_id_;
  jmethodID j_get_rotation_id_;
-  jmethodID j_get_transform_matrix_id_;
  jmethodID j_get_timestamp_ns_id_;
 
  ScopedGlobalRef<jclass> j_video_frame_buffer_class_;
  jmethodID j_retain_id_;
  jmethodID j_release_id_;
+  jmethodID j_get_width_id_;
+  jmethodID j_get_height_id_;
 };
 
 }  // namespace webrtc_jni
@@ -59,9 +59,7 @@ class JavaVideoRendererWrapper
         break;
       case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
         j_frame = static_cast<AndroidVideoBuffer*>(android_buffer)
-                      ->ToJavaI420Frame(jni(), video_frame.width(),
-                                        video_frame.height(),
-                                        video_frame.rotation());
+                      ->ToJavaI420Frame(jni(), video_frame.rotation());
         break;
       default:
         RTC_NOTREACHED();
webrtc/sdk/android/src/jni/videoframe_jni.cc (new file, 62 lines)
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "libyuv/scale.h"
+
+#include "webrtc/rtc_base/checks.h"
+
+namespace webrtc_jni {
+
+extern "C" JNIEXPORT void JNICALL
+Java_org_webrtc_VideoFrame_nativeCropAndScaleI420(JNIEnv* jni,
+    jclass,
+    jobject j_src_y,
+    jint src_stride_y,
+    jobject j_src_u,
+    jint src_stride_u,
+    jobject j_src_v,
+    jint src_stride_v,
+    jint crop_x,
+    jint crop_y,
+    jint crop_width,
+    jint crop_height,
+    jobject j_dst_y,
+    jint dst_stride_y,
+    jobject j_dst_u,
+    jint dst_stride_u,
+    jobject j_dst_v,
+    jint dst_stride_v,
+    jint scale_width,
+    jint scale_height) {
+  uint8_t const* src_y =
+      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_y));
+  uint8_t const* src_u =
+      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_u));
+  uint8_t const* src_v =
+      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_v));
+  uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
+  uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
+  uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));
+
+  // Perform cropping using pointer arithmetic.
+  src_y += crop_x + crop_y * src_stride_y;
+  src_u += crop_x / 2 + crop_y / 2 * src_stride_u;
+  src_v += crop_x / 2 + crop_y / 2 * src_stride_v;
+
+  bool ret = libyuv::I420Scale(
+      src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, crop_width,
+      crop_height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
+      dst_stride_v, scale_width, scale_height, libyuv::kFilterBox);
+  RTC_DCHECK_EQ(ret, 0) << "I420Scale failed";
+}
+
+}  // namespace webrtc_jni