Android: Output VideoFrames from SurfaceTextureHelper

Bug: webrtc:9412
Change-Id: Iffc8dae2fdfb8d7e5c730b433614b7aa30ceb55b
Reviewed-on: https://webrtc-review.googlesource.com/83943
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23867}
Author: Magnus Jedvert <magjed@webrtc.org>
Date: 2018-07-06 11:15:13 +02:00
Committed-by: Commit Bot
parent 419b6dc691
commit 80e7a7fd1a

9 changed files with 195 additions and 162 deletions
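At a glance, the patch migrates every in-tree consumer of SurfaceTextureHelper from the texture-specific OnTextureFrameAvailableListener callback to the generic VideoSink interface, and moves texture size and rotation bookkeeping into the helper itself. A minimal before/after sketch of the migration (illustrative only, not from the patch; the helper and dimensions are assumed to exist):

import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoFrame;

class ListenerMigrationSketch {
  // Deprecated path: raw OES texture data, returned manually.
  static void before(SurfaceTextureHelper helper) {
    helper.startListening((int oesTextureId, float[] transformMatrix, long timestampNs) -> {
      // ... wrap the texture into a buffer yourself ...
      helper.returnTextureFrame(); // required in order to receive the next frame
    });
  }

  // New path: complete VideoFrames. The texture size must be set up front,
  // because the helper now constructs the frames itself.
  static void after(SurfaceTextureHelper helper, int width, int height) {
    helper.setTextureSize(width, height);
    helper.startListening((VideoFrame frame) -> {
      // ... use the frame; retain() its buffer if it must outlive this callback ...
    });
  }
}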

MediaCodecVideoDecoder.java

@@ -368,6 +368,7 @@ public class MediaCodecVideoDecoder {
           SurfaceTextureHelper.create("Decoder SurfaceTextureHelper", eglContext);
       if (surfaceTextureHelper != null) {
         textureListener = new TextureListener(surfaceTextureHelper);
+        textureListener.setSize(width, height);
         surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
       }
     }
@@ -414,6 +415,9 @@ public class MediaCodecVideoDecoder {
     this.width = width;
     this.height = height;
+    if (textureListener != null) {
+      textureListener.setSize(width, height);
+    }
     decodeStartTimeMs.clear();
     dequeuedSurfaceOutputBuffers.clear();
     hasDecodedFirstFrame = false;
@@ -634,12 +638,12 @@ public class MediaCodecVideoDecoder {
   }

   // Poll based texture listener.
-  private class TextureListener implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+  private class TextureListener implements VideoSink {
     private final SurfaceTextureHelper surfaceTextureHelper;
     // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
     private final Object newFrameLock = new Object();
     // |bufferToRender| is non-null when waiting for transition between addBufferToRender() to
-    // onTextureFrameAvailable().
+    // onFrame().
     @Nullable private DecodedOutputBuffer bufferToRender;
     @Nullable private DecodedTextureBuffer renderedBuffer;
@@ -662,19 +666,21 @@ public class MediaCodecVideoDecoder {
       }
     }

+    public void setSize(int width, int height) {
+      surfaceTextureHelper.setTextureSize(width, height);
+    }
+
     // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
     @Override
-    public void onTextureFrameAvailable(
-        int oesTextureId, float[] transformMatrix, long timestampNs) {
+    public void onFrame(VideoFrame frame) {
       synchronized (newFrameLock) {
         if (renderedBuffer != null) {
-          Logging.e(
-              TAG, "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+          Logging.e(TAG, "Unexpected onFrame() called while already holding a texture.");
           throw new IllegalStateException("Already holding a texture.");
         }
         // |timestampNs| is always zero on some Android versions.
-        final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
-            width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+        final VideoFrame.Buffer buffer = frame.getBuffer();
+        buffer.retain();
         renderedBuffer = new DecodedTextureBuffer(buffer, bufferToRender.presentationTimeStampMs,
             bufferToRender.timeStampMs, bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
             SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
@@ -703,9 +709,9 @@ public class MediaCodecVideoDecoder {
     }

     public void release() {
-      // SurfaceTextureHelper.stopListening() will block until any onTextureFrameAvailable() in
-      // progress is done. Therefore, the call must be outside any synchronized
-      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+      // SurfaceTextureHelper.stopListening() will block until any onFrame() in progress is done.
+      // Therefore, the call must be outside any synchronized statement that is also used in the
+      // onFrame() above to avoid deadlocks.
       surfaceTextureHelper.stopListening();
       synchronized (newFrameLock) {
         if (renderedBuffer != null) {
@@ -763,6 +769,9 @@ public class MediaCodecVideoDecoder {
       }
       width = newWidth;
       height = newHeight;
+      if (textureListener != null) {
+        textureListener.setSize(width, height);
+      }
       if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
         colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
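The rewritten TextureListener above illustrates the ownership rule under the new API: the helper releases the frame once onFrame() returns, so a listener that keeps the buffer must retain() it, and whoever consumes it later must release() it. A condensed, hypothetical sketch of that hand-off pattern (names are illustrative, not from the patch):

import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;

// onFrame() runs on the helper thread, but the buffer must stay valid until
// another thread has consumed it, so it is retained here and released by the
// consumer.
class FrameHandoffSketch implements VideoSink {
  private final Object lock = new Object();
  private VideoFrame.Buffer pendingBuffer; // guarded by |lock|

  @Override
  public void onFrame(VideoFrame frame) {
    synchronized (lock) {
      if (pendingBuffer != null) {
        throw new IllegalStateException("Already holding a texture.");
      }
      final VideoFrame.Buffer buffer = frame.getBuffer();
      buffer.retain(); // |frame| is released by SurfaceTextureHelper after onFrame() returns.
      pendingBuffer = buffer;
      lock.notifyAll();
    }
  }

  /** Called by the consumer thread; the caller takes ownership and must release(). */
  VideoFrame.Buffer takeBuffer() throws InterruptedException {
    synchronized (lock) {
      while (pendingBuffer == null) {
        lock.wait();
      }
      final VideoFrame.Buffer buffer = pendingBuffer;
      pendingBuffer = null;
      return buffer;
    }
  }
}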

ScreenCapturerAndroid.java

@@ -33,8 +33,7 @@ import javax.annotation.Nullable;
  * frames. At any time, at most one frame is being processed.
  */
 @TargetApi(21)
-public class ScreenCapturerAndroid
-    implements VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
+public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
   private static final int DISPLAY_FLAGS =
       DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
   // DPI for VirtualDisplay, does not seem to matter for us.
@@ -186,7 +185,7 @@ public class ScreenCapturerAndroid
   }

   private void createVirtualDisplay() {
-    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);
     virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
         VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
         null /* callback */, null /* callback handler */);
@@ -194,13 +193,9 @@ public class ScreenCapturerAndroid
   // This is called on the internal looper thread of {@Code SurfaceTextureHelper}.
   @Override
-  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
+  public void onFrame(VideoFrame frame) {
     numCapturedFrames++;
-    final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
-        width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
-    final VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, timestampNs);
     capturerObserver.onFrameCaptured(frame);
-    frame.release();
   }

   @Override
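For context, this is roughly how a capturer wires the helper's SurfaceTexture into a VirtualDisplay under the new API. A hedged sketch mirroring the calls visible in the diff; the class names, dpi parameter, and consume() hook are illustrative assumptions:

import android.hardware.display.DisplayManager;
import android.media.projection.MediaProjection;
import android.view.Surface;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoFrame;

class ScreenCaptureWiringSketch {
  void start(SurfaceTextureHelper helper, MediaProjection projection,
      int width, int height, int dpi) {
    // setTextureSize() replaces the direct setDefaultBufferSize() call.
    helper.setTextureSize(width, height);
    projection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
        dpi, // per the comment in the diff, the DPI value does not seem to matter
        DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
            | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION,
        new Surface(helper.getSurfaceTexture()),
        /* callback= */ null, /* callbackHandler= */ null);
    helper.startListening((VideoFrame frame) -> {
      // The frame is only guaranteed valid for the duration of this callback;
      // forward it synchronously or retain() its buffer.
      consume(frame);
    });
  }

  private void consume(VideoFrame frame) {}
}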

SurfaceTextureHelper.java

@@ -25,14 +25,11 @@ import org.webrtc.EglBase;
 import org.webrtc.VideoFrame.TextureBuffer;

 /**
- * Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
- * of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
- * the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
- * called in order to receive a new frame. Call stopListening() to stop receiveing new frames. Call
- * dispose to release all resources once the texture frame is returned.
- * Note that there is a C++ counter part of this class that optionally can be used. It is used for
- * wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
- * when the webrtc::VideoFrame is no longer used.
+ * Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC
+ * VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only
+ * one texture frame can be in flight at once, so the frame must be released in order to receive a
+ * new frame. Call stopListening() to stop receiveing new frames. Call dispose to release all
+ * resources once the texture frame is released.
  */
 public class SurfaceTextureHelper {
   private static final String TAG = "SurfaceTextureHelper";
@@ -40,10 +37,12 @@ public class SurfaceTextureHelper {
    * Callback interface for being notified that a new texture frame is available. The calls will be
    * made on the SurfaceTextureHelper handler thread, with a bound EGLContext. The callee is not
    * allowed to make another EGLContext current on the calling thread.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public interface OnTextureFrameAvailableListener {
-    abstract void onTextureFrameAvailable(
-        int oesTextureId, float[] transformMatrix, long timestampNs);
+    void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs);
   }

   /**
@@ -82,14 +81,20 @@ public class SurfaceTextureHelper {
   private final YuvConverter yuvConverter = new YuvConverter();

   // These variables are only accessed from the |handler| thread.
-  @Nullable private OnTextureFrameAvailableListener listener;
+  // The type of |listener| is either a VideoSink or the deprecated OnTextureFrameAvailableListener.
+  @Nullable private Object listener;
   // The possible states of this class.
   private boolean hasPendingTexture = false;
   private volatile boolean isTextureInUse = false;
   private boolean isQuitting = false;
+  private int frameRotation;
+  private int textureWidth;
+  private int textureHeight;
   // |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
   // setListener() is not allowed to be called again before stopListening(), so this is thread safe.
-  @Nullable private OnTextureFrameAvailableListener pendingListener;
+  // The type of |pendingListener| is either a VideoSink or the deprecated
+  // OnTextureFrameAvailableListener.
+  @Nullable private Object pendingListener;
   final Runnable setListenerRunnable = new Runnable() {
     @Override
     public void run() {
@@ -148,8 +153,23 @@ public class SurfaceTextureHelper {
   /**
    * Start to stream textures to the given |listener|. If you need to change listener, you need to
    * call stopListening() first.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public void startListening(final OnTextureFrameAvailableListener listener) {
+    startListeningInternal(listener);
+  }
+
+  /**
+   * Start to stream textures to the given |listener|. If you need to change listener, you need to
+   * call stopListening() first.
+   */
+  public void startListening(final VideoSink listener) {
+    startListeningInternal(listener);
+  }
+
+  private void startListeningInternal(Object listener) {
     if (this.listener != null || this.pendingListener != null) {
       throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
     }
@@ -164,15 +184,36 @@ public class SurfaceTextureHelper {
   public void stopListening() {
     Logging.d(TAG, "stopListening()");
     handler.removeCallbacks(setListenerRunnable);
-    ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
-      @Override
-      public void run() {
-        listener = null;
-        pendingListener = null;
-      }
+    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+      listener = null;
+      pendingListener = null;
     });
   }

+  /**
+   * Use this function to set the texture size. Note, do not call setDefaultBufferSize() yourself
+   * since this class needs to be aware of the texture size.
+   */
+  public void setTextureSize(int textureWidth, int textureHeight) {
+    if (textureWidth <= 0) {
+      throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth);
+    }
+    if (textureHeight <= 0) {
+      throw new IllegalArgumentException(
+          "Texture height must be positive, but was " + textureHeight);
+    }
+    surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight);
+    handler.post(() -> {
+      this.textureWidth = textureWidth;
+      this.textureHeight = textureHeight;
+    });
+  }
+
+  /** Set the rotation of the delivered frames. */
+  public void setFrameRotation(int rotation) {
+    handler.post(() -> this.frameRotation = rotation);
+  }
+
   /**
    * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
    * producer such as a camera or decoder.
@@ -193,18 +234,18 @@ public class SurfaceTextureHelper {
    * Call this function to signal that you are done with the frame received in
    * onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
    * this function in order to receive a new frame.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public void returnTextureFrame() {
-    handler.post(new Runnable() {
-      @Override
-      public void run() {
-        isTextureInUse = false;
-        if (isQuitting) {
-          release();
-        } else {
-          tryDeliverTextureFrame();
-        }
-      }
+    handler.post(() -> {
+      isTextureInUse = false;
+      if (isQuitting) {
+        release();
+      } else {
+        tryDeliverTextureFrame();
+      }
     });
   }

@@ -219,23 +260,22 @@ public class SurfaceTextureHelper {
    */
   public void dispose() {
     Logging.d(TAG, "dispose()");
-    ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
-      @Override
-      public void run() {
-        isQuitting = true;
-        if (!isTextureInUse) {
-          release();
-        }
-      }
+    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+      isQuitting = true;
+      if (!isTextureInUse) {
+        release();
+      }
     });
   }

   /**
    * Posts to the correct thread to convert |textureBuffer| to I420.
+   *
+   * @deprecated Use toI420() instead.
    */
+  @Deprecated
   public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
-    return ThreadUtils.invokeAtFrontUninterruptibly(
-        handler, () -> yuvConverter.convert(textureBuffer));
+    return textureBuffer.toI420();
   }

   private void updateTexImage() {
@@ -262,7 +302,19 @@ public class SurfaceTextureHelper {
     final float[] transformMatrix = new float[16];
     surfaceTexture.getTransformMatrix(transformMatrix);
     final long timestampNs = surfaceTexture.getTimestamp();
-    listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+    if (listener instanceof OnTextureFrameAvailableListener) {
+      ((OnTextureFrameAvailableListener) listener)
+          .onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+    } else if (listener instanceof VideoSink) {
+      if (textureWidth == 0 || textureHeight == 0) {
+        throw new RuntimeException("Texture size has not been set.");
+      }
+      final VideoFrame.Buffer buffer = createTextureBuffer(textureWidth, textureHeight,
+          RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+      final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs);
+      ((VideoSink) listener).onFrame(frame);
+      frame.release();
+    }
   }

   private void release() {
@@ -286,7 +338,10 @@ public class SurfaceTextureHelper {
    *
    * The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The
    * buffer calls returnTextureFrame() when it is released.
+   *
+   * @deprecated Use a VideoSink as listener instead.
    */
+  @Deprecated
   public TextureBufferImpl createTextureBuffer(int width, int height, Matrix transformMatrix) {
     return new TextureBufferImpl(width, height, TextureBuffer.Type.OES, oesTextureId,
         transformMatrix, handler, yuvConverter, this ::returnTextureFrame);
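Taken together, a minimal usage sketch of the reworked helper (all names are illustrative; passing null as the shared EGL context is assumed to be acceptable, as the helper then creates its own root context):

import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoFrame;

class SurfaceTextureHelperUsageSketch {
  void run() {
    // Create the helper with its own handler thread; a shared EGL context is optional.
    final SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("DemoThread", /* sharedContext= */ null);

    // The texture size must be set before frames can be delivered to a VideoSink;
    // otherwise frame delivery throws "Texture size has not been set.".
    helper.setTextureSize(1280, 720);
    helper.setFrameRotation(90); // optional; defaults to 0

    helper.startListening((VideoFrame frame) -> {
      // Called on the helper thread with a bound EGL context. The helper releases
      // the frame after this callback returns, which in turn unblocks delivery of
      // the next frame, so retain() the buffer to keep it longer.
      System.out.println("Got frame: " + frame.getBuffer().getWidth() + "x"
          + frame.getBuffer().getHeight() + " rotation=" + frame.getRotation());
    });

    // Render into helper.getSurfaceTexture() (camera, decoder, VirtualDisplay, ...).

    // Shutdown: stop callbacks first, then free the texture and handler thread.
    helper.stopListening();
    helper.dispose();
  }
}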

GlRectDrawerTest.java

@@ -237,11 +237,11 @@ public class GlRectDrawerTest {
     private final GlRectDrawer drawer;
     private final int rgbTexture;

-    public StubOesTextureProducer(
-        EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width, int height) {
+    public StubOesTextureProducer(EglBase.Context sharedContext,
+        SurfaceTextureHelper surfaceTextureHelper, int width, int height) {
       eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
-      surfaceTexture.setDefaultBufferSize(width, height);
-      eglBase.createSurface(surfaceTexture);
+      surfaceTextureHelper.setTextureSize(width, height);
+      eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
       assertEquals(eglBase.surfaceWidth(), width);
       assertEquals(eglBase.surfaceHeight(), height);
@@ -281,7 +281,7 @@ public class GlRectDrawerTest {
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
     final StubOesTextureProducer oesProducer = new StubOesTextureProducer(
-        eglBase.getEglBaseContext(), surfaceTextureHelper.getSurfaceTexture(), WIDTH, HEIGHT);
+        eglBase.getEglBaseContext(), surfaceTextureHelper, WIDTH, HEIGHT);
     final SurfaceTextureHelperTest.MockTextureListener listener =
         new SurfaceTextureHelperTest.MockTextureListener();
     surfaceTextureHelper.startListening(listener);

SurfaceTextureHelperTest.java

@@ -129,7 +129,7 @@ public class SurfaceTextureHelperTest {
         "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
-    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);

     // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
     // |surfaceTextureHelper| as the target EGLSurface.
@@ -197,7 +197,7 @@ public class SurfaceTextureHelperTest {
         "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
-    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);

     // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
     // |surfaceTextureHelper| as the target EGLSurface.
@@ -453,7 +453,7 @@ public class SurfaceTextureHelperTest {
         "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
-    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);

     // Create resources for stubbing an OES texture producer. |eglBase| has the SurfaceTexture in
     // |surfaceTextureHelper| as the target EGLSurface.

VideoFrameBufferTest.java

@@ -181,8 +181,7 @@ public class VideoFrameBufferTest {
     // Create resources for generating OES textures.
     final SurfaceTextureHelper surfaceTextureHelper =
         SurfaceTextureHelper.create("SurfaceTextureHelper test", eglContext);
-    final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
-    surfaceTexture.setDefaultBufferSize(width, height);
+    surfaceTextureHelper.setTextureSize(width, height);

     final HandlerThread renderThread = new HandlerThread("OES texture thread");
     renderThread.start();
@@ -191,7 +190,7 @@ public class VideoFrameBufferTest {
     ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
       // Create EGL base with the SurfaceTexture as display output.
       final EglBase eglBase = EglBase.create(eglContext, EglBase.CONFIG_PLAIN);
-      eglBase.createSurface(surfaceTexture);
+      eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
       eglBase.makeCurrent();
       assertEquals(width, eglBase.surfaceWidth());
       assertEquals(height, eglBase.surfaceHeight());

Camera1Session.java

@@ -174,6 +174,8 @@ class Camera1Session implements CameraSession {
     this.captureFormat = captureFormat;
     this.constructionTimeNs = constructionTimeNs;

+    surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+
     startCapturing();
   }

@@ -247,19 +249,11 @@ class Camera1Session implements CameraSession {
   }

   private void listenForTextureFrames() {
-    surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
-      @Override
-      public void onTextureFrameAvailable(
-          int oesTextureId, float[] transformMatrix, long timestampNs) {
-        checkIsOnCameraThread();
-        final TextureBufferImpl buffer =
-            surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
-                RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
-        if (state != SessionState.RUNNING) {
-          Logging.d(TAG, "Texture frame captured but camera is no longer running.");
-          buffer.release();
-          return;
-        }
+    surfaceTextureHelper.startListening((VideoFrame frame) -> {
+      checkIsOnCameraThread();
+      if (state != SessionState.RUNNING) {
+        Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+        return;
+      }
@@ -272,15 +266,14 @@ class Camera1Session implements CameraSession {

       // Undo the mirror that the OS "helps" us with.
       // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
-        final VideoFrame frame = new VideoFrame(
-            CameraSession.createTextureBufferWithModifiedTransformMatrix(buffer,
-                /* mirror= */ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT,
-                /* rotation= */ 0),
-            /* rotation= */ getFrameOrientation(), timestampNs);
-        buffer.release();
-        events.onFrameCaptured(Camera1Session.this, frame);
-        frame.release();
-      }
+      final VideoFrame modifiedFrame = new VideoFrame(
+          CameraSession.createTextureBufferWithModifiedTransformMatrix(
+              (TextureBufferImpl) frame.getBuffer(),
+              /* mirror= */ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT,
+              /* rotation= */ 0),
+          /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+      events.onFrameCaptured(Camera1Session.this, modifiedFrame);
+      modifiedFrame.release();
     });
   }
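Both camera sessions now follow the same re-wrapping rule: build a new VideoFrame around the incoming frame's (possibly transformed) buffer, forward it, and release only the frame created locally; the incoming frame remains owned by the helper. A generic sketch of that rule, with an explicit retain() standing in for the internal reference that createTextureBufferWithModifiedTransformMatrix is assumed to take; FrameConsumer is a hypothetical stand-in for CameraSession.Events:

import org.webrtc.VideoFrame;

final class FrameRewrapSketch {
  static void rewrapAndForward(VideoFrame frame, int rotation, FrameConsumer consumer) {
    final VideoFrame.Buffer buffer = frame.getBuffer();
    buffer.retain(); // the new frame needs its own reference to the buffer
    final VideoFrame modifiedFrame =
        new VideoFrame(buffer, rotation, frame.getTimestampNs());
    consumer.onFrameCaptured(modifiedFrame); // the consumer retains if it keeps the frame
    modifiedFrame.release(); // drops the reference taken above
  }

  interface FrameConsumer {
    void onFrameCaptured(VideoFrame frame);
  }
}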

Camera2Session.java

@@ -121,9 +121,8 @@ class Camera2Session implements CameraSession {
       Logging.d(TAG, "Camera opened.");
       cameraDevice = camera;

-      final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
-      surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
-      surface = new Surface(surfaceTexture);
+      surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+      surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
       try {
         camera.createCaptureSession(
             Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
@@ -183,20 +182,11 @@ class Camera2Session implements CameraSession {
         return;
       }

-      surfaceTextureHelper.startListening(
-          new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
-            @Override
-            public void onTextureFrameAvailable(
-                int oesTextureId, float[] transformMatrix, long timestampNs) {
-              checkIsOnCameraThread();
-              final TextureBufferImpl buffer = surfaceTextureHelper.createTextureBuffer(
-                  captureFormat.width, captureFormat.height,
-                  RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
-              if (state != SessionState.RUNNING) {
-                Logging.d(TAG, "Texture frame captured but camera is no longer running.");
-                buffer.release();
-                return;
-              }
+      surfaceTextureHelper.startListening((VideoFrame frame) -> {
+        checkIsOnCameraThread();
+        if (state != SessionState.RUNNING) {
+          Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+          return;
+        }
@@ -210,15 +200,14 @@ class Camera2Session implements CameraSession {

         // Undo the mirror that the OS "helps" us with.
         // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
         // Also, undo camera orientation, we report it as rotation instead.
-              final VideoFrame frame = new VideoFrame(
-                  CameraSession.createTextureBufferWithModifiedTransformMatrix(buffer,
-                      /* mirror= */ isCameraFrontFacing,
-                      /* rotation= */ -cameraOrientation),
-                  /* rotation= */ getFrameOrientation(), timestampNs);
-              buffer.release();
-              events.onFrameCaptured(Camera2Session.this, frame);
-              frame.release();
-            }
+        final VideoFrame modifiedFrame =
+            new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
+                               (TextureBufferImpl) frame.getBuffer(),
+                               /* mirror= */ isCameraFrontFacing,
+                               /* rotation= */ -cameraOrientation),
+                /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+        events.onFrameCaptured(Camera2Session.this, modifiedFrame);
+        modifiedFrame.release();
       });

       Logging.d(TAG, "Camera device successfully started.");
       callback.onDone(Camera2Session.this);

HardwareVideoDecoder.java

@@ -27,8 +27,7 @@ import org.webrtc.ThreadUtils.ThreadChecker;
 /** Android hardware video decoder. */
 @TargetApi(16)
 @SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods.
-class HardwareVideoDecoder
-    implements VideoDecoder, SurfaceTextureHelper.OnTextureFrameAvailableListener {
+class HardwareVideoDecoder implements VideoDecoder, VideoSink {
   private static final String TAG = "HardwareVideoDecoder";

   // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
@@ -106,17 +105,10 @@ class HardwareVideoDecoder
   @Nullable private Surface surface = null;

   private static class DecodedTextureMetadata {
-    final int width;
-    final int height;
-    final int rotation;
     final long presentationTimestampUs;
     final Integer decodeTimeMs;

-    DecodedTextureMetadata(
-        int width, int height, int rotation, long presentationTimestampUs, Integer decodeTimeMs) {
-      this.width = width;
-      this.height = height;
-      this.rotation = rotation;
+    DecodedTextureMetadata(long presentationTimestampUs, Integer decodeTimeMs) {
       this.presentationTimestampUs = presentationTimestampUs;
       this.decodeTimeMs = decodeTimeMs;
     }
@@ -223,7 +215,8 @@ class HardwareVideoDecoder
     }

     // Load dimensions from shared memory under the dimension lock.
-    int width, height;
+    final int width;
+    final int height;
     synchronized (dimensionLock) {
       width = this.width;
       height = this.height;
@@ -418,7 +411,8 @@ class HardwareVideoDecoder
   private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
       final int rotation, final Integer decodeTimeMs) {
     // Load dimensions from shared memory under the dimension lock.
-    final int width, height;
+    final int width;
+    final int height;
     synchronized (dimensionLock) {
       width = this.width;
       height = this.height;
@@ -428,32 +422,31 @@ class HardwareVideoDecoder
       if (renderedTextureMetadata != null) {
         return; // We are still waiting for texture for the previous frame, drop this one.
       }
-      renderedTextureMetadata = new DecodedTextureMetadata(
-          width, height, rotation, info.presentationTimeUs, decodeTimeMs);
-      codec.releaseOutputBuffer(index, true);
+      surfaceTextureHelper.setTextureSize(width, height);
+      surfaceTextureHelper.setFrameRotation(rotation);
+      renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs);
+      codec.releaseOutputBuffer(index, /* render= */ true);
     }
   }

   @Override
-  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
-    final VideoFrame frame;
+  public void onFrame(VideoFrame frame) {
+    final VideoFrame newFrame;
     final int decodeTimeMs;
+    final long timestampNs;
     synchronized (renderedTextureMetadataLock) {
       if (renderedTextureMetadata == null) {
         throw new IllegalStateException(
             "Rendered texture metadata was null in onTextureFrameAvailable.");
       }
-      VideoFrame.TextureBuffer oesBuffer = surfaceTextureHelper.createTextureBuffer(
-          renderedTextureMetadata.width, renderedTextureMetadata.height,
-          RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
-      frame = new VideoFrame(oesBuffer, renderedTextureMetadata.rotation,
-          renderedTextureMetadata.presentationTimestampUs * 1000);
+      timestampNs = renderedTextureMetadata.presentationTimestampUs * 1000;
       decodeTimeMs = renderedTextureMetadata.decodeTimeMs;
       renderedTextureMetadata = null;
     }
-    callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
-    frame.release();
+
+    // Change timestamp of frame.
+    final VideoFrame frameWithModifiedTimeStamp =
+        new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs);
+    callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */);
   }

   private void deliverByteFrame(
@@ -493,7 +486,7 @@ class HardwareVideoDecoder
       // All other supported color formats are NV12.
       frameBuffer = copyNV12ToI420Buffer(buffer, stride, sliceHeight, width, height);
     }
-    codec.releaseOutputBuffer(result, false);
+    codec.releaseOutputBuffer(result, /* render= */ false);

     long presentationTimeNs = info.presentationTimeUs * 1000;
     VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
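To summarize the decoder's new texture path: per-frame size and rotation now travel through the helper, so DecodedTextureMetadata shrinks to a timestamp and decode time, and onFrame() only re-stamps the delivered frame with the codec's presentation time. A condensed, hypothetical sketch of that flow (the real class guards dimensions and metadata with separate locks, and reports decode time alongside the frame):

import android.media.MediaCodec;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;

abstract class TexturePathSketch implements VideoSink {
  private final Object metadataLock = new Object();
  private Long pendingTimestampUs; // guarded by |metadataLock|

  // Called when the codec has a decoded buffer ready for the output surface.
  void onDecodedToSurface(MediaCodec codec, SurfaceTextureHelper helper, int index,
      MediaCodec.BufferInfo info, int width, int height, int rotation) {
    synchronized (metadataLock) {
      if (pendingTimestampUs != null) {
        return; // still waiting for the previous texture; drop this frame
      }
      // Size and rotation are pushed into the helper before rendering.
      helper.setTextureSize(width, height);
      helper.setFrameRotation(rotation);
      pendingTimestampUs = info.presentationTimeUs;
      codec.releaseOutputBuffer(index, /* render= */ true); // triggers onFrame() later
    }
  }

  @Override
  public void onFrame(VideoFrame frame) {
    final long timestampNs;
    synchronized (metadataLock) {
      if (pendingTimestampUs == null) {
        throw new IllegalStateException("Metadata was null in onFrame().");
      }
      timestampNs = pendingTimestampUs * 1000;
      pendingTimestampUs = null;
    }
    // Re-stamp the frame with the codec's presentation time; the receiver
    // retains the frame if it needs to keep it beyond this call.
    deliver(new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs));
  }

  abstract void deliver(VideoFrame frame);
}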