Android: Only use Java VideoFrames internally

This CL removes internal support for anything other than Android frames
that are wrapped in Java VideoFrames. This allows a big internal
cleanup: the internal class AndroidTextureBuffer and all logic related
to it can be removed. The C++ AndroidVideoTrackSource also no longer
needs to hold on to a C++ SurfaceTextureHelper, so all JNI code related
to SurfaceTextureHelper can be removed. Once those JNI methods are gone,
VideoSource can implement the CapturerObserver interface directly, and
AndroidVideoTrackSourceObserver is no longer needed. Clients can then
initialize VideoCapturers themselves outside the PeerConnectionFactory,
and a new PeerConnectionFactory method lets clients create standalone
VideoSources that can be connected to a VideoCapturer outside the
factory.

Bug: webrtc:9181
Change-Id: Ie292ea9214f382d44dce9120725c62602a646ed8
Reviewed-on: https://webrtc-review.googlesource.com/71666
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23004}
Author: Magnus Jedvert
Date: 2018-04-24 15:11:02 +02:00
Committed by: Commit Bot
Parent: 498592d391
Commit: 1a759c6354
27 changed files with 124 additions and 743 deletions
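For reference, a minimal sketch of the client-side wiring this change enables. The createVideoSource(boolean), getCapturerObserver(), initialize() and createVideoTrack() calls are taken from the diffs below; the factory, EGL context, application context and capturer are assumed to already exist on the client side, and names like "CaptureThread" are illustrative.

// Assumes an initialized PeerConnectionFactory `factory`, an EglBase `eglBase`,
// an Android `applicationContext`, and a client-created VideoCapturer `capturer`.
VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
SurfaceTextureHelper surfaceTextureHelper =
    SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
capturer.initialize(
    surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver());
capturer.startCapture(/* width= */ 640, /* height= */ 480, /* framerate= */ 30);
VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource);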


@@ -612,25 +612,6 @@ public class MediaCodecVideoEncoder {
}
}
@CalledByNativeUnchecked
boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
long presentationTimestampUs) {
checkOnMediaCodecThread();
try {
checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
eglBase.makeCurrent();
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
// but it's a workaround for bug webrtc:5147.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
return true;
} catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e);
return false;
}
}
/**
* Encodes a new style VideoFrame. |bufferIndex| is -1 if we are not encoding in surface mode.
*/


@@ -350,18 +350,20 @@ public class PeerConnectionFactory {
return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
}
public VideoSource createVideoSource(boolean isScreencast) {
return new VideoSource(nativeCreateVideoSource(nativeFactory, isScreencast));
}
public VideoSource createVideoSource(VideoCapturer capturer) {
final EglBase.Context eglContext =
localEglbase == null ? null : localEglbase.getEglBaseContext();
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create(VIDEO_CAPTURER_THREAD_NAME, eglContext);
long nativeAndroidVideoTrackSource =
nativeCreateVideoSource(nativeFactory, surfaceTextureHelper, capturer.isScreencast());
VideoCapturer.CapturerObserver capturerObserver =
new AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
capturer.initialize(
surfaceTextureHelper, ContextUtils.getApplicationContext(), capturerObserver);
return new VideoSource(nativeAndroidVideoTrackSource);
final VideoSource videoSource = new VideoSource(
nativeCreateVideoSource(nativeFactory, capturer.isScreencast()), surfaceTextureHelper);
capturer.initialize(surfaceTextureHelper, ContextUtils.getApplicationContext(),
videoSource.getCapturerObserver());
return videoSource;
}
public VideoTrack createVideoTrack(String id, VideoSource source) {
@@ -498,8 +500,7 @@ public class PeerConnectionFactory {
private static native long nativeCreatePeerConnection(long factory,
PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver);
private static native long nativeCreateLocalMediaStream(long factory, String label);
private static native long nativeCreateVideoSource(
long factory, SurfaceTextureHelper surfaceTextureHelper, boolean is_screencast);
private static native long nativeCreateVideoSource(long factory, boolean is_screencast);
private static native long nativeCreateVideoTrack(
long factory, String id, long nativeVideoSource);
private static native long nativeCreateAudioSource(long factory, MediaConstraints constraints);


@@ -27,7 +27,7 @@ import javax.annotation.Nullable;
* {@code SurfaceTexture} using a {@code SurfaceTextureHelper}.
* The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in
* {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it
* as a texture to the native code via {@code CapturerObserver.onTextureFrameCaptured()}. This takes
* as a texture to the native code via {@code CapturerObserver.onFrameCaptured()}. This takes
* place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
* the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
* frames. At any time, at most one frame is being processed.
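
Below, the documented flow as a hedged sketch: one texture frame delivered on the SurfaceTextureHelper's handler thread through the unified CapturerObserver.onFrameCaptured() path. The createTextureBuffer() call mirrors the NativeCapturerObserver adapter later in this CL; width, height, transformMatrix, rotation and timestampNs are assumed to come from the capturer's frame-available callback.

// Wrap the OES texture in a VideoFrame.Buffer owned by the SurfaceTextureHelper.
VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
    width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
capturerObserver.onFrameCaptured(frame);
// Releasing the frame hands the texture back to the SurfaceTextureHelper for reuse.
frame.release();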


@@ -51,7 +51,6 @@ public class SurfaceTextureHelper {
* thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to
* initialize a pixel buffer surface and make it current.
*/
@CalledByNative
public static SurfaceTextureHelper create(
final String threadName, final EglBase.Context sharedContext) {
final HandlerThread thread = new HandlerThread(threadName);
@@ -195,7 +194,6 @@ public class SurfaceTextureHelper {
* onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
* this function in order to receive a new frame.
*/
@CalledByNative
public void returnTextureFrame() {
handler.post(new Runnable() {
@Override
@@ -219,7 +217,6 @@ public class SurfaceTextureHelper {
* stopped when the texture frame has been returned by a call to returnTextureFrame(). You are
* guaranteed to not receive any more onTextureFrameAvailable() after this function returns.
*/
@CalledByNative
public void dispose() {
Logging.d(TAG, "dispose()");
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
@@ -233,20 +230,6 @@ public class SurfaceTextureHelper {
});
}
/** Deprecated, use textureToYuv. */
@Deprecated
@SuppressWarnings("deprecation") // yuvConverter.convert is deprecated
@CalledByNative
void textureToYUV(final ByteBuffer buf, final int width, final int height, final int stride,
final int textureId, final float[] transformMatrix) {
if (textureId != oesTextureId) {
throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
}
ThreadUtils.invokeAtFrontUninterruptibly(handler,
() -> yuvConverter.convert(buf, width, height, stride, textureId, transformMatrix));
}
/**
* Posts to the correct thread to convert |textureBuffer| to I420.
*/


@@ -46,8 +46,8 @@ public interface VideoCapturer {
* capture observer. It will be called only once and before any startCapture() request. The
* camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants
* to deliver texture frames, it should do this by rendering on the SurfaceTexture in
* |surfaceTextureHelper|, register itself as a listener, and forward the texture frames to
* CapturerObserver.onTextureFrameCaptured().
* |surfaceTextureHelper|, register itself as a listener, and forward the frames to
* CapturerObserver.onFrameCaptured().
*/
void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
CapturerObserver capturerObserver);
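
A minimal, illustrative capturer skeleton against this contract. The class name and the frame production are assumptions; initialize(), startCapture(), and the observer callbacks are the ones shown in this interface and in VideoSource below.

import android.content.Context;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;

class FrameForwardingCapturer implements VideoCapturer {
  private SurfaceTextureHelper surfaceTextureHelper;
  private VideoCapturer.CapturerObserver capturerObserver;

  @Override
  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      VideoCapturer.CapturerObserver capturerObserver) {
    // Called once, before any startCapture() request.
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.capturerObserver = capturerObserver;
  }

  @Override
  public void startCapture(int width, int height, int framerate) {
    capturerObserver.onCapturerStarted(/* success= */ true);
    // Start producing frames and deliver each one via capturerObserver.onFrameCaptured(frame).
  }

  @Override
  public void stopCapture() {
    capturerObserver.onCapturerStopped();
  }

  @Override
  public void changeCaptureFormat(int width, int height, int framerate) {}

  @Override
  public void dispose() {}

  @Override
  public boolean isScreencast() {
    return false;
  }
}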


@@ -186,13 +186,6 @@ public class VideoRenderer {
return new I420Frame(width, height, rotationDegree, new int[] {y_stride, u_stride, v_stride},
new ByteBuffer[] {y_buffer, u_buffer, v_buffer}, nativeFramePointer);
}
@CalledByNative("I420Frame")
static I420Frame createTextureFrame(int width, int height, int rotationDegree, int textureId,
float[] samplingMatrix, long nativeFramePointer) {
return new I420Frame(
width, height, rotationDegree, textureId, samplingMatrix, nativeFramePointer);
}
}
/** The real meat of VideoSinkInterface. */


@@ -10,13 +10,84 @@
package org.webrtc;
import javax.annotation.Nullable;
/**
* Java wrapper of native AndroidVideoTrackSource.
*/
@JNINamespace("webrtc::jni")
public class VideoSource extends MediaSource {
private static class NativeCapturerObserver implements VideoCapturer.CapturerObserver {
private final long nativeSource;
// TODO(bugs.webrtc.org/9181): Remove.
@Nullable private final SurfaceTextureHelper surfaceTextureHelper;
public NativeCapturerObserver(long nativeSource) {
this.nativeSource = nativeSource;
this.surfaceTextureHelper = null;
}
// TODO(bugs.webrtc.org/9181): Remove.
public NativeCapturerObserver(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
this.nativeSource = nativeSource;
this.surfaceTextureHelper = surfaceTextureHelper;
}
@Override
public void onCapturerStarted(boolean success) {
nativeCapturerStarted(nativeSource, success);
}
@Override
public void onCapturerStopped() {
nativeCapturerStopped(nativeSource);
}
// TODO(bugs.webrtc.org/9181): Remove.
@Override
@SuppressWarnings("deprecation")
public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timestampNs) {
// It is not possible to retain this NV21Buffer. This is safe only because the native code
// always calls cropAndScale() and directly makes a deep copy of the buffer.
final VideoFrame.Buffer nv21Buffer =
new NV21Buffer(data, width, height, null /* releaseCallback */);
final VideoFrame frame = new VideoFrame(nv21Buffer, rotation, timestampNs);
onFrameCaptured(frame);
frame.release();
}
// TODO(bugs.webrtc.org/9181): Remove.
@Override
@SuppressWarnings("deprecation")
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestampNs) {
final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
onFrameCaptured(frame);
frame.release();
}
@Override
public void onFrameCaptured(VideoFrame frame) {
nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(),
frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs(),
frame.getBuffer());
}
}
private final VideoCapturer.CapturerObserver capturerObserver;
public VideoSource(long nativeSource) {
super(nativeSource);
this.capturerObserver = new NativeCapturerObserver(nativeSource);
}
// TODO(bugs.webrtc.org/9181): Remove.
VideoSource(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
super(nativeSource);
this.capturerObserver = new NativeCapturerObserver(nativeSource, surfaceTextureHelper);
}
/**
@@ -29,5 +100,13 @@ public class VideoSource extends MediaSource {
nativeAdaptOutputFormat(nativeSource, width, height, fps);
}
public VideoCapturer.CapturerObserver getCapturerObserver() {
return capturerObserver;
}
private static native void nativeAdaptOutputFormat(long source, int width, int height, int fps);
private static native void nativeCapturerStarted(long source, boolean success);
private static native void nativeCapturerStopped(long source);
private static native void nativeOnFrameCaptured(
long source, int width, int height, int rotation, long timestampNs, VideoFrame.Buffer frame);
}
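
For completeness, a hedged sketch of a client delivering a byte-buffer frame straight through the observer as a VideoFrame, instead of relying on the deprecated onByteBufferFrameCaptured() adapter above. The data, width, height, rotation and timestampNs values are assumed to come from the client's capture pipeline; the NV21Buffer/VideoFrame construction mirrors the adapter code.

VideoCapturer.CapturerObserver observer = videoSource.getCapturerObserver();
VideoFrame.Buffer nv21Buffer =
    new NV21Buffer(data, width, height, /* releaseCallback= */ null);
VideoFrame frame = new VideoFrame(nv21Buffer, rotation, timestampNs);
// As in the adapter above, this relies on the native source making a deep copy of the buffer.
observer.onFrameCaptured(frame);
frame.release();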