Android: Output VideoFrames from SurfaceTextureHelper

Bug: webrtc:9412
Change-Id: Iffc8dae2fdfb8d7e5c730b433614b7aa30ceb55b
Reviewed-on: https://webrtc-review.googlesource.com/83943
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23867}
Author: Magnus Jedvert
Committed by: Commit Bot
Date: 2018-07-06 11:15:13 +02:00
Commit: 80e7a7fd1a
Parent: 419b6dc691
9 changed files with 195 additions and 162 deletions
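Every file below follows the same migration: SurfaceTextureHelper no longer hands its listener a raw OES texture id, transform matrix and timestamp, but delivers a ready-made VideoFrame through the VideoSink interface. A minimal before/after sketch of the listener change, using the names that appear in the diffs (width and height stand in for whatever dimensions the caller has):

// Before: the listener received raw texture data and built the buffer itself.
surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
  @Override
  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
    final TextureBufferImpl buffer = surfaceTextureHelper.createTextureBuffer(
        width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
    // ... wrap the buffer in a VideoFrame, hand it off, release it ...
  }
});

// After: the caller declares the texture size up front and receives complete VideoFrames.
surfaceTextureHelper.setTextureSize(width, height);
surfaceTextureHelper.startListening((VideoFrame frame) -> {
  // ... frame.getBuffer(), frame.getRotation() and frame.getTimestampNs() are already populated ...
});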

Camera1Session.java

@@ -174,6 +174,8 @@ class Camera1Session implements CameraSession {
this.captureFormat = captureFormat;
this.constructionTimeNs = constructionTimeNs;
surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
startCapturing();
}
@@ -247,40 +249,31 @@ class Camera1Session implements CameraSession {
}
private void listenForTextureFrames() {
surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread();
surfaceTextureHelper.startListening((VideoFrame frame) -> {
checkIsOnCameraThread();
final TextureBufferImpl buffer =
surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
buffer.release();
return;
}
if (!firstFrameReported) {
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera1StartTimeMsHistogram.addSample(startTimeMs);
firstFrameReported = true;
}
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
final VideoFrame frame = new VideoFrame(
CameraSession.createTextureBufferWithModifiedTransformMatrix(buffer,
/* mirror= */ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT,
/* rotation= */ 0),
/* rotation= */ getFrameOrientation(), timestampNs);
buffer.release();
events.onFrameCaptured(Camera1Session.this, frame);
frame.release();
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
return;
}
if (!firstFrameReported) {
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera1StartTimeMsHistogram.addSample(startTimeMs);
firstFrameReported = true;
}
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
final VideoFrame modifiedFrame = new VideoFrame(
CameraSession.createTextureBufferWithModifiedTransformMatrix(
(TextureBufferImpl) frame.getBuffer(),
/* mirror= */ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT,
/* rotation= */ 0),
/* rotation= */ getFrameOrientation(), frame.getTimestampNs());
events.onFrameCaptured(Camera1Session.this, modifiedFrame);
modifiedFrame.release();
});
}
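
One consequence worth noting in the new Camera1Session listener: the session no longer creates or releases the incoming texture buffer. It only wraps the delivered frame's buffer to undo the OS-applied mirroring, attaches the frame orientation, and releases the frame it created itself. A condensed sketch of that ownership pattern, assuming (as the code above does) that the helper manages the lifetime of the frame it passes in; isFrontFacing and session are placeholders:

surfaceTextureHelper.startListening((VideoFrame frame) -> {
  final VideoFrame adjusted = new VideoFrame(
      CameraSession.createTextureBufferWithModifiedTransformMatrix(
          (TextureBufferImpl) frame.getBuffer(),
          /* mirror= */ isFrontFacing,
          /* rotation= */ 0),
      /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
  events.onFrameCaptured(session, adjusted);
  adjusted.release(); // Release only the frame created here; the incoming frame is the helper's.
});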

Camera2Session.java

@@ -121,9 +121,8 @@ class Camera2Session implements CameraSession {
Logging.d(TAG, "Camera opened.");
cameraDevice = camera;
final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
surface = new Surface(surfaceTexture);
surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
try {
camera.createCaptureSession(
Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
@@ -183,43 +182,33 @@ class Camera2Session implements CameraSession {
return;
}
surfaceTextureHelper.startListening(
new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread();
surfaceTextureHelper.startListening((VideoFrame frame) -> {
checkIsOnCameraThread();
final TextureBufferImpl buffer = surfaceTextureHelper.createTextureBuffer(
captureFormat.width, captureFormat.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
return;
}
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
buffer.release();
return;
}
if (!firstFrameReported) {
firstFrameReported = true;
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera2StartTimeMsHistogram.addSample(startTimeMs);
}
if (!firstFrameReported) {
firstFrameReported = true;
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera2StartTimeMsHistogram.addSample(startTimeMs);
}
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
// Also, undo camera orientation, we report it as rotation instead.
final VideoFrame frame = new VideoFrame(
CameraSession.createTextureBufferWithModifiedTransformMatrix(buffer,
/* mirror= */ isCameraFrontFacing,
/* rotation= */ -cameraOrientation),
/* rotation= */ getFrameOrientation(), timestampNs);
buffer.release();
events.onFrameCaptured(Camera2Session.this, frame);
frame.release();
}
});
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
// Also, undo camera orientation, we report it as rotation instead.
final VideoFrame modifiedFrame =
new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
(TextureBufferImpl) frame.getBuffer(),
/* mirror= */ isCameraFrontFacing,
/* rotation= */ -cameraOrientation),
/* rotation= */ getFrameOrientation(), frame.getTimestampNs());
events.onFrameCaptured(Camera2Session.this, modifiedFrame);
modifiedFrame.release();
});
Logging.d(TAG, "Camera device successfully started.");
callback.onDone(Camera2Session.this);
}
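
Camera2Session follows the same shape, with two extra details: the capture Surface's buffer size is now set through surfaceTextureHelper.setTextureSize() rather than SurfaceTexture.setDefaultBufferSize(), and the fixed sensor orientation is stripped from the texture transform (rotation = -cameraOrientation) so that it can be reported as the VideoFrame's rotation instead. A condensed sketch of the surface setup, using the names from the diff:

// The buffer size goes through the helper; the Surface still wraps the helper's SurfaceTexture.
surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());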

HardwareVideoDecoder.java

@@ -27,8 +27,7 @@ import org.webrtc.ThreadUtils.ThreadChecker;
/** Android hardware video decoder. */
@TargetApi(16)
@SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods.
class HardwareVideoDecoder
implements VideoDecoder, SurfaceTextureHelper.OnTextureFrameAvailableListener {
class HardwareVideoDecoder implements VideoDecoder, VideoSink {
private static final String TAG = "HardwareVideoDecoder";
// TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
@@ -106,17 +105,10 @@ class HardwareVideoDecoder
@Nullable private Surface surface = null;
private static class DecodedTextureMetadata {
final int width;
final int height;
final int rotation;
final long presentationTimestampUs;
final Integer decodeTimeMs;
DecodedTextureMetadata(
int width, int height, int rotation, long presentationTimestampUs, Integer decodeTimeMs) {
this.width = width;
this.height = height;
this.rotation = rotation;
DecodedTextureMetadata(long presentationTimestampUs, Integer decodeTimeMs) {
this.presentationTimestampUs = presentationTimestampUs;
this.decodeTimeMs = decodeTimeMs;
}
@@ -223,7 +215,8 @@ class HardwareVideoDecoder
}
// Load dimensions from shared memory under the dimension lock.
int width, height;
final int width;
final int height;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
@@ -418,7 +411,8 @@ class HardwareVideoDecoder
private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
final int rotation, final Integer decodeTimeMs) {
// Load dimensions from shared memory under the dimension lock.
final int width, height;
final int width;
final int height;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
@@ -428,32 +422,31 @@ class HardwareVideoDecoder
if (renderedTextureMetadata != null) {
return; // We are still waiting for texture for the previous frame, drop this one.
}
renderedTextureMetadata = new DecodedTextureMetadata(
width, height, rotation, info.presentationTimeUs, decodeTimeMs);
codec.releaseOutputBuffer(index, true);
surfaceTextureHelper.setTextureSize(width, height);
surfaceTextureHelper.setFrameRotation(rotation);
renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs);
codec.releaseOutputBuffer(index, /* render= */ true);
}
}
@Override
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
final VideoFrame frame;
public void onFrame(VideoFrame frame) {
final VideoFrame newFrame;
final int decodeTimeMs;
final long timestampNs;
synchronized (renderedTextureMetadataLock) {
if (renderedTextureMetadata == null) {
throw new IllegalStateException(
"Rendered texture metadata was null in onTextureFrameAvailable.");
}
VideoFrame.TextureBuffer oesBuffer = surfaceTextureHelper.createTextureBuffer(
renderedTextureMetadata.width, renderedTextureMetadata.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
frame = new VideoFrame(oesBuffer, renderedTextureMetadata.rotation,
renderedTextureMetadata.presentationTimestampUs * 1000);
timestampNs = renderedTextureMetadata.presentationTimestampUs * 1000;
decodeTimeMs = renderedTextureMetadata.decodeTimeMs;
renderedTextureMetadata = null;
}
callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
frame.release();
// Change timestamp of frame.
final VideoFrame frameWithModifiedTimeStamp =
new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs);
callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */);
}
private void deliverByteFrame(
@@ -493,7 +486,7 @@ class HardwareVideoDecoder
// All other supported color formats are NV12.
frameBuffer = copyNV12ToI420Buffer(buffer, stride, sliceHeight, width, height);
}
codec.releaseOutputBuffer(result, false);
codec.releaseOutputBuffer(result, /* render= */ false);
long presentationTimeNs = info.presentationTimeUs * 1000;
VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
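
In HardwareVideoDecoder the texture path now pairs up in two steps: deliverTextureFrame() pushes the decoded dimensions and rotation into the helper and keeps only the timestamp metadata, and onFrame() later re-stamps the delivered VideoFrame with the codec's presentation time before forwarding it. A condensed sketch of that handshake, using names from the diff (simplified, error handling omitted):

// Step 1: an output buffer is ready; record metadata and render it to the helper's SurfaceTexture.
surfaceTextureHelper.setTextureSize(width, height);
surfaceTextureHelper.setFrameRotation(rotation);
renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs);
codec.releaseOutputBuffer(index, /* render= */ true);

// Step 2: the helper converts the rendered texture into a VideoFrame and calls onFrame(),
// where only the timestamp is swapped before the frame reaches the decode callback.
final VideoFrame frameWithModifiedTimeStamp =
    new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs);
callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */);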