Android: Only use Java VideoFrames internally

This CL removes internal support for anything other than Android
frames that are wrapped as Java VideoFrames. This allows for a big
internal cleanup: the internal class AndroidTextureBuffer and all
logic related to it can be removed. The C++ AndroidVideoTrackSource
also no longer needs to hold on to a C++ SurfaceTextureHelper, so all
JNI code related to SurfaceTextureHelper can be removed as well. Once
these methods are gone, VideoSource can implement the CapturerObserver
interface directly and AndroidVideoTrackSourceObserver is no longer
needed. Clients can then initialize VideoCapturers themselves outside
the PeerConnectionFactory, and a new method is added to
PeerConnectionFactory that lets clients create standalone VideoSources
which can be connected to a VideoCapturer outside the factory.

Bug: webrtc:9181
Change-Id: Ie292ea9214f382d44dce9120725c62602a646ed8
Reviewed-on: https://webrtc-review.googlesource.com/71666
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23004}
Author: Magnus Jedvert
Date: 2018-04-24 15:11:02 +02:00
Committed by: Commit Bot
Parent: 498592d391
Commit: 1a759c6354
27 changed files with 124 additions and 743 deletions
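The client-side wiring this enables might look roughly like the sketch
below. It is illustrative only: getCapturerObserver() comes from the
diff in this CL, while the createVideoSource(boolean) signature, the
EglBase/Context plumbing, and the class and method names are
assumptions rather than part of this change.

import android.content.Context;

import org.webrtc.EglBase;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;

/** Sketch: connecting a VideoCapturer to a standalone VideoSource outside the factory. */
class StandaloneVideoSourceExample {
  static VideoTrack startCapture(PeerConnectionFactory factory, EglBase eglBase,
      Context applicationContext, VideoCapturer capturer) {
    // The capturer is no longer passed to the factory; the source is created on its own.
    // The boolean isScreencast argument is an assumption about the new factory method.
    final VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);

    // The SurfaceTextureHelper is owned by the client/capturer, not by the source.
    final SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());

    // Initialize and start the capturer outside the PeerConnectionFactory; frames are
    // delivered as Java VideoFrames through the source's CapturerObserver.
    capturer.initialize(helper, applicationContext, videoSource.getCapturerObserver());
    capturer.startCapture(/* width= */ 1280, /* height= */ 720, /* framerate= */ 30);

    return factory.createVideoTrack("video0", videoSource);
  }
}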


@@ -10,13 +10,84 @@
package org.webrtc;

import javax.annotation.Nullable;

/**
 * Java wrapper of native AndroidVideoTrackSource.
 */
@JNINamespace("webrtc::jni")
public class VideoSource extends MediaSource {
  private static class NativeCapturerObserver implements VideoCapturer.CapturerObserver {
    private final long nativeSource;
    // TODO(bugs.webrtc.org/9181): Remove.
    @Nullable private final SurfaceTextureHelper surfaceTextureHelper;

    public NativeCapturerObserver(long nativeSource) {
      this.nativeSource = nativeSource;
      this.surfaceTextureHelper = null;
    }

    // TODO(bugs.webrtc.org/9181): Remove.
    public NativeCapturerObserver(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
      this.nativeSource = nativeSource;
      this.surfaceTextureHelper = surfaceTextureHelper;
    }

    @Override
    public void onCapturerStarted(boolean success) {
      nativeCapturerStarted(nativeSource, success);
    }

    @Override
    public void onCapturerStopped() {
      nativeCapturerStopped(nativeSource);
    }

    // TODO(bugs.webrtc.org/9181): Remove.
    @Override
    @SuppressWarnings("deprecation")
    public void onByteBufferFrameCaptured(
        byte[] data, int width, int height, int rotation, long timestampNs) {
      // This NV21Buffer is not possible to retain. This is safe only because the native code will
      // always call cropAndScale() and directly make a deep copy of the buffer.
      final VideoFrame.Buffer nv21Buffer =
          new NV21Buffer(data, width, height, null /* releaseCallback */);
      final VideoFrame frame = new VideoFrame(nv21Buffer, rotation, timestampNs);
      onFrameCaptured(frame);
      frame.release();
    }

    // TODO(bugs.webrtc.org/9181): Remove.
    @Override
    @SuppressWarnings("deprecation")
    public void onTextureFrameCaptured(int width, int height, int oesTextureId,
        float[] transformMatrix, int rotation, long timestampNs) {
      final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
          width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
      final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
      onFrameCaptured(frame);
      frame.release();
    }

    @Override
    public void onFrameCaptured(VideoFrame frame) {
      nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(),
          frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs(),
          frame.getBuffer());
    }
  }

  private final VideoCapturer.CapturerObserver capturerObserver;

  public VideoSource(long nativeSource) {
    super(nativeSource);
    this.capturerObserver = new NativeCapturerObserver(nativeSource);
  }

  // TODO(bugs.webrtc.org/9181): Remove.
  VideoSource(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
    super(nativeSource);
    this.capturerObserver = new NativeCapturerObserver(nativeSource, surfaceTextureHelper);
  }

  /**
@@ -29,5 +100,13 @@ public class VideoSource extends MediaSource {
    nativeAdaptOutputFormat(nativeSource, width, height, fps);
  }

  public VideoCapturer.CapturerObserver getCapturerObserver() {
    return capturerObserver;
  }

  private static native void nativeAdaptOutputFormat(long source, int width, int height, int fps);
  private static native void nativeCapturerStarted(long source, boolean success);
  private static native void nativeCapturerStopped(long source);
  private static native void nativeOnFrameCaptured(
      long source, int width, int height, int rotation, long timestampNs, VideoFrame.Buffer frame);
}
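
For completeness, pushing a frame into the source through the observer
exposed above might look like the minimal sketch below. It mirrors the
onFrameCaptured()/release() pattern in the diff; JavaI420Buffer and the
class and method names here are illustrative assumptions, not part of
this CL.

import org.webrtc.JavaI420Buffer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;

/** Sketch: handing a Java VideoFrame to the source through its CapturerObserver. */
class FramePushExample {
  static void pushFrame(VideoCapturer.CapturerObserver observer, int width, int height) {
    // Allocate an I420 buffer (contents left as allocated; illustrative only).
    final VideoFrame.Buffer buffer = JavaI420Buffer.allocate(width, height);
    final VideoFrame frame =
        new VideoFrame(buffer, /* rotation= */ 0, /* timestampNs= */ System.nanoTime());

    // onFrameCaptured() forwards width/height/rotation/timestamp plus the buffer to
    // nativeOnFrameCaptured(); the local reference is released right afterwards, the
    // same pattern the diff uses for its own frame delivery paths.
    observer.onFrameCaptured(frame);
    frame.release();
  }
}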