platform-external-webrtc/sdk/android/api/org/webrtc/VideoRenderer.java
Magnus Jedvert 1a759c6354 Android: Only use Java VideoFrames internally
This CL removes internal support for anything other than Android frames
that are wrapped Java VideoFrames. This allows for a big internal
cleanup: the internal class AndroidTextureBuffer and all logic related
to it can be removed. The C++ AndroidVideoTrackSource also no longer
needs to hold on to a C++ SurfaceTextureHelper, so all JNI code related
to SurfaceTextureHelper can be removed as well. Once these methods are
removed, VideoSource can implement the CapturerObserver interface
directly and AndroidVideoTrackSourceObserver is no longer needed.
Clients can then initialize VideoCapturers themselves outside the
PeerConnectionFactory, and a new method is added to the
PeerConnectionFactory that lets clients create standalone VideoSources
which can be connected to a VideoCapturer outside the factory (see the
usage sketch below).

Bug: webrtc:9181
Change-Id: Ie292ea9214f382d44dce9120725c62602a646ed8
Reviewed-on: https://webrtc-review.googlesource.com/71666
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23004}
2018-04-24 13:51:11 +00:00
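
A minimal sketch of the standalone-VideoSource wiring described in the commit message above, assuming the post-change API (a PeerConnectionFactory.createVideoSource(boolean) overload, with VideoSource acting as its own CapturerObserver); the capturer, context, and EGL objects here are illustrative placeholders, not part of this file:

// Hypothetical client code showing the new wiring.
SurfaceTextureHelper helper =
    SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
// New factory method: create the source without handing over a capturer.
VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
// VideoSource implements CapturerObserver directly, so the capturer can
// deliver frames straight to it, entirely outside the factory.
videoCapturer.initialize(helper, applicationContext, videoSource);
videoCapturer.startCapture(/* width= */ 640, /* height= */ 480, /* framerate= */ 30);
VideoTrack track = factory.createVideoTrack("video0", videoSource);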


/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
import javax.annotation.Nullable;
/**
 * Java version of VideoSinkInterface. Allows clients to define their own
 * rendering behavior by passing in a Callbacks object.
 */
@JNINamespace("webrtc::jni")
public class VideoRenderer {
/**
* Java version of webrtc::VideoFrame. Frames are only constructed from native code and test
* code.
*/
public static class I420Frame {
public final int width;
public final int height;
@Nullable public final int[] yuvStrides;
@Nullable public ByteBuffer[] yuvPlanes;
public final boolean yuvFrame;
// Matrix that transforms standard coordinates to their proper sampling locations in
// the texture. This transform compensates for any properties of the video source that
// cause it to appear different from a normalized texture. This matrix does not take
// |rotationDegree| into account.
@Nullable public final float[] samplingMatrix;
public int textureId;
// Frame pointer in C++.
private long nativeFramePointer;
    // rotationDegree is the number of degrees (a multiple of 90) that the frame must be
    // rotated clockwise to be rendered correctly.
    public int rotationDegree;
// If this I420Frame was constructed from VideoFrame.Buffer, this points to
// the backing buffer.
@Nullable private final VideoFrame.Buffer backingBuffer;
/**
* Construct a frame of the given dimensions with the specified planar data.
*/
public I420Frame(int width, int height, int rotationDegree, int[] yuvStrides,
ByteBuffer[] yuvPlanes, long nativeFramePointer) {
this.width = width;
this.height = height;
this.yuvStrides = yuvStrides;
this.yuvPlanes = yuvPlanes;
this.yuvFrame = true;
this.rotationDegree = rotationDegree;
this.nativeFramePointer = nativeFramePointer;
backingBuffer = null;
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
// bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
// matrix.
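      // As a column-major 4x4 matrix, the flip maps v -> 1 - v; assuming
      // RendererCommon.verticalFlipMatrix() is this plain flip, it equals
      //   {1, 0, 0, 0,   0, -1, 0, 0,   0, 0, 1, 0,   0, 1, 0, 1}.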
samplingMatrix = RendererCommon.verticalFlipMatrix();
}
    /**
     * Construct a texture frame of the given dimensions with data in a SurfaceTexture.
     */
public I420Frame(int width, int height, int rotationDegree, int textureId,
float[] samplingMatrix, long nativeFramePointer) {
this.width = width;
this.height = height;
this.yuvStrides = null;
this.yuvPlanes = null;
this.samplingMatrix = samplingMatrix;
this.textureId = textureId;
this.yuvFrame = false;
this.rotationDegree = rotationDegree;
this.nativeFramePointer = nativeFramePointer;
backingBuffer = null;
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
}
/**
* Construct a frame from VideoFrame.Buffer.
*/
@CalledByNative("I420Frame")
public I420Frame(int rotationDegree, VideoFrame.Buffer buffer, long nativeFramePointer) {
this.width = buffer.getWidth();
this.height = buffer.getHeight();
this.rotationDegree = rotationDegree;
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
if (buffer instanceof VideoFrame.TextureBuffer
&& ((VideoFrame.TextureBuffer) buffer).getType() == VideoFrame.TextureBuffer.Type.OES) {
VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer;
this.yuvFrame = false;
this.textureId = textureBuffer.getTextureId();
this.samplingMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(
textureBuffer.getTransformMatrix());
this.yuvStrides = null;
this.yuvPlanes = null;
} else if (buffer instanceof VideoFrame.I420Buffer) {
VideoFrame.I420Buffer i420Buffer = (VideoFrame.I420Buffer) buffer;
this.yuvFrame = true;
this.yuvStrides =
new int[] {i420Buffer.getStrideY(), i420Buffer.getStrideU(), i420Buffer.getStrideV()};
this.yuvPlanes =
new ByteBuffer[] {i420Buffer.getDataY(), i420Buffer.getDataU(), i420Buffer.getDataV()};
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
// bottom-left corner. This discrepancy is corrected by multiplying the sampling matrix with
// a vertical flip matrix.
this.samplingMatrix = RendererCommon.verticalFlipMatrix();
this.textureId = 0;
} else {
this.yuvFrame = false;
this.textureId = 0;
this.samplingMatrix = null;
this.yuvStrides = null;
this.yuvPlanes = null;
}
this.nativeFramePointer = nativeFramePointer;
backingBuffer = buffer;
}
public int rotatedWidth() {
return (rotationDegree % 180 == 0) ? width : height;
}
public int rotatedHeight() {
return (rotationDegree % 180 == 0) ? height : width;
}
@Override
public String toString() {
final String type = yuvFrame
? "Y: " + yuvStrides[0] + ", U: " + yuvStrides[1] + ", V: " + yuvStrides[2]
: "Texture: " + textureId;
return width + "x" + height + ", " + type;
}
/**
* Convert the frame to VideoFrame. It is no longer safe to use the I420Frame after calling
* this.
*/
VideoFrame toVideoFrame() {
final VideoFrame.Buffer buffer;
if (backingBuffer != null) {
        // We were constructed from a VideoFrame.Buffer. Retain it so it outlives the native
        // webrtc::VideoFrame released by renderFrameDone() below, then return it directly.
backingBuffer.retain();
VideoRenderer.renderFrameDone(this);
buffer = backingBuffer;
} else if (yuvFrame) {
buffer = JavaI420Buffer.wrap(width, height, yuvPlanes[0], yuvStrides[0], yuvPlanes[1],
yuvStrides[1], yuvPlanes[2], yuvStrides[2],
() -> { VideoRenderer.renderFrameDone(this); });
} else {
        // Note: with no Handler or YuvConverter, calling toI420() on this buffer will crash.
buffer = new TextureBufferImpl(width, height, VideoFrame.TextureBuffer.Type.OES, textureId,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(samplingMatrix), null /* handler */,
null /* yuvConverter */, () -> VideoRenderer.renderFrameDone(this));
}
return new VideoFrame(buffer, rotationDegree, 0 /* timestampNs */);
}
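    // Illustrative use with a hypothetical |sink|: ownership moves into the VideoFrame,
    // and releasing it frees the underlying frame resources via the release callback:
    //
    //   VideoFrame videoFrame = i420Frame.toVideoFrame();
    //   sink.onFrame(videoFrame);
    //   videoFrame.release();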
@CalledByNative("I420Frame")
static I420Frame createI420Frame(int width, int height, int rotationDegree, int y_stride,
ByteBuffer y_buffer, int u_stride, ByteBuffer u_buffer, int v_stride, ByteBuffer v_buffer,
long nativeFramePointer) {
return new I420Frame(width, height, rotationDegree, new int[] {y_stride, u_stride, v_stride},
new ByteBuffer[] {y_buffer, u_buffer, v_buffer}, nativeFramePointer);
}
}
/** The real meat of VideoSinkInterface. */
public static interface Callbacks {
// |frame| might have pending rotation and implementation of Callbacks
// should handle that by applying rotation during rendering. The callee
// is responsible for signaling when it is done with |frame| by calling
// renderFrameDone(frame).
@CalledByNative("Callbacks") void renderFrame(I420Frame frame);
}
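  // A minimal sketch of a Callbacks implementation (illustrative only):
  //
  //   class ForwardingCallbacks implements VideoRenderer.Callbacks {
  //     @Override
  //     public void renderFrame(VideoRenderer.I420Frame frame) {
  //       // ... draw |frame|, applying frame.rotationDegree ...
  //       VideoRenderer.renderFrameDone(frame); // Required after every frame.
  //     }
  //   }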
/**
* This must be called after every renderFrame() to release the frame.
*/
public static void renderFrameDone(I420Frame frame) {
frame.yuvPlanes = null;
frame.textureId = 0;
if (frame.nativeFramePointer != 0) {
nativeReleaseFrame(frame.nativeFramePointer);
frame.nativeFramePointer = 0;
}
}
long nativeVideoRenderer;
public VideoRenderer(Callbacks callbacks) {
nativeVideoRenderer = nativeCreateVideoRenderer(callbacks);
}
public void dispose() {
if (nativeVideoRenderer == 0) {
// Already disposed.
return;
}
nativeFreeWrappedVideoRenderer(nativeVideoRenderer);
nativeVideoRenderer = 0;
}
private static native long nativeCreateVideoRenderer(Callbacks callbacks);
private static native void nativeFreeWrappedVideoRenderer(long videoRenderer);
private static native void nativeReleaseFrame(long framePointer);
}