Android camera: Replace custom matrix operations with android.opengl.Matrix

Bug: webrtc:9412, webrtc:9487
Change-Id: I68e5a03026b1ab8236a05ece79690d4a8755c093
Reviewed-on: https://webrtc-review.googlesource.com/86947
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23851}
This commit is contained in:
Magnus Jedvert
2018-07-05 13:34:17 +02:00
committed by Commit Bot
parent 918f50c5d1
commit 783c6e3a72
6 changed files with 86 additions and 85 deletions

View File

@@ -287,7 +287,7 @@ public class SurfaceTextureHelper {
* The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The * The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The
* buffer calls returnTextureFrame() when it is released. * buffer calls returnTextureFrame() when it is released.
*/ */
public TextureBuffer createTextureBuffer(int width, int height, Matrix transformMatrix) { public TextureBufferImpl createTextureBuffer(int width, int height, Matrix transformMatrix) {
return new TextureBufferImpl(width, height, TextureBuffer.Type.OES, oesTextureId, return new TextureBufferImpl(width, height, TextureBuffer.Type.OES, oesTextureId,
transformMatrix, handler, yuvConverter, this ::returnTextureFrame); transformMatrix, handler, yuvConverter, this ::returnTextureFrame);
} }

View File

@@ -84,15 +84,27 @@ public class TextureBufferImpl implements VideoFrame.TextureBuffer {
@Override @Override
public VideoFrame.Buffer cropAndScale( public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) { int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
final Matrix newMatrix = new Matrix(transformMatrix); final Matrix cropAndScaleMatrix = new Matrix();
// In WebRTC, Y=0 is the top row, while in OpenGL Y=0 is the bottom row. This means that the Y // In WebRTC, Y=0 is the top row, while in OpenGL Y=0 is the bottom row. This means that the Y
// direction is effectively reversed. // direction is effectively reversed.
final int cropYFromBottom = height - (cropY + cropHeight); final int cropYFromBottom = height - (cropY + cropHeight);
newMatrix.preTranslate(cropX / (float) width, cropYFromBottom / (float) height); cropAndScaleMatrix.preTranslate(cropX / (float) width, cropYFromBottom / (float) height);
newMatrix.preScale(cropWidth / (float) width, cropHeight / (float) height); cropAndScaleMatrix.preScale(cropWidth / (float) width, cropHeight / (float) height);
return applyTransformMatrix(cropAndScaleMatrix, scaleWidth, scaleHeight);
}
/**
* Create a new TextureBufferImpl with an applied transform matrix and a new size. The
* existing buffer is unchanged. The given transform matrix is applied first when texture
* coordinates are still in the unmodified [0, 1] range.
*/
public TextureBufferImpl applyTransformMatrix(
Matrix transformMatrix, int newWidth, int newHeight) {
final Matrix newMatrix = new Matrix(this.transformMatrix);
newMatrix.preConcat(transformMatrix);
retain(); retain();
return new TextureBufferImpl( return new TextureBufferImpl(
scaleWidth, scaleHeight, type, id, newMatrix, toI420Handler, yuvConverter, this ::release); newWidth, newHeight, type, id, newMatrix, toI420Handler, yuvConverter, this ::release);
} }
} }

View File

@@ -118,7 +118,7 @@ public class VideoFrame implements RefCounted {
* homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to * homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to
* the coordinate that should be used to sample that location from the buffer. * the coordinate that should be used to sample that location from the buffer.
*/ */
public Matrix getTransformMatrix(); Matrix getTransformMatrix();
} }
private final Buffer buffer; private final Buffer buffer;

View File

@@ -14,13 +14,13 @@ import android.content.Context;
import android.os.Handler; import android.os.Handler;
import android.os.SystemClock; import android.os.SystemClock;
import android.view.Surface; import android.view.Surface;
import android.view.WindowManager;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.List; import java.util.List;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat; import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import android.graphics.Matrix;
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
class Camera1Session implements CameraSession { class Camera1Session implements CameraSession {
@@ -253,9 +253,13 @@ class Camera1Session implements CameraSession {
int oesTextureId, float[] transformMatrix, long timestampNs) { int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread(); checkIsOnCameraThread();
final TextureBufferImpl buffer =
surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
if (state != SessionState.RUNNING) { if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running."); Logging.d(TAG, "Texture frame captured but camera is no longer running.");
surfaceTextureHelper.returnTextureFrame(); buffer.release();
return; return;
} }
@@ -266,17 +270,14 @@ class Camera1Session implements CameraSession {
firstFrameReported = true; firstFrameReported = true;
} }
int rotation = getFrameOrientation(); // Undo the mirror that the OS "helps" us with.
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) { // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
// Undo the mirror that the OS "helps" us with. final VideoFrame frame = new VideoFrame(
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) CameraSession.createTextureBufferWithModifiedTransformMatrix(buffer,
transformMatrix = RendererCommon.multiplyMatrices( /* mirror= */ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT,
transformMatrix, RendererCommon.horizontalFlipMatrix()); /* rotation= */ 0),
} /* rotation= */ getFrameOrientation(), timestampNs);
final VideoFrame.Buffer buffer = buffer.release();
surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
events.onFrameCaptured(Camera1Session.this, frame); events.onFrameCaptured(Camera1Session.this, frame);
frame.release(); frame.release();
} }
@@ -321,30 +322,8 @@ class Camera1Session implements CameraSession {
}); });
} }
/**
 * Returns the current rotation of the default display, in degrees (0, 90, 180 or 270).
 * Unrecognized rotation constants are treated as 0 degrees.
 */
private int getDeviceOrientation() {
  final WindowManager windowManager =
      (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
  final int displayRotation = windowManager.getDefaultDisplay().getRotation();
  // Translate the Surface.ROTATION_* constant into degrees.
  switch (displayRotation) {
    case Surface.ROTATION_90:
      return 90;
    case Surface.ROTATION_180:
      return 180;
    case Surface.ROTATION_270:
      return 270;
    case Surface.ROTATION_0:
    default:
      return 0;
  }
}
private int getFrameOrientation() { private int getFrameOrientation() {
int rotation = getDeviceOrientation(); int rotation = CameraSession.getDeviceOrientation(applicationContext);
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) { if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
rotation = 360 - rotation; rotation = 360 - rotation;
} }

View File

@@ -26,7 +26,6 @@ import android.os.Handler;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import android.util.Range; import android.util.Range;
import android.view.Surface; import android.view.Surface;
import android.view.WindowManager;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
@@ -191,9 +190,13 @@ class Camera2Session implements CameraSession {
int oesTextureId, float[] transformMatrix, long timestampNs) { int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread(); checkIsOnCameraThread();
final TextureBufferImpl buffer = surfaceTextureHelper.createTextureBuffer(
captureFormat.width, captureFormat.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
if (state != SessionState.RUNNING) { if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running."); Logging.d(TAG, "Texture frame captured but camera is no longer running.");
surfaceTextureHelper.returnTextureFrame(); buffer.release();
return; return;
} }
@@ -204,22 +207,15 @@ class Camera2Session implements CameraSession {
camera2StartTimeMsHistogram.addSample(startTimeMs); camera2StartTimeMsHistogram.addSample(startTimeMs);
} }
int rotation = getFrameOrientation(); // Undo the mirror that the OS "helps" us with.
if (isCameraFrontFacing) { // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
// Undo the mirror that the OS "helps" us with. // Also, undo camera orientation, we report it as rotation instead.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) final VideoFrame frame = new VideoFrame(
transformMatrix = RendererCommon.multiplyMatrices( CameraSession.createTextureBufferWithModifiedTransformMatrix(buffer,
transformMatrix, RendererCommon.horizontalFlipMatrix()); /* mirror= */ isCameraFrontFacing,
} /* rotation= */ -cameraOrientation),
/* rotation= */ getFrameOrientation(), timestampNs);
// Undo camera orientation - we report it as rotation instead. buffer.release();
transformMatrix =
RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
captureFormat.width, captureFormat.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
events.onFrameCaptured(Camera2Session.this, frame); events.onFrameCaptured(Camera2Session.this, frame);
frame.release(); frame.release();
} }
@@ -421,30 +417,8 @@ class Camera2Session implements CameraSession {
} }
} }
/**
 * Returns the current rotation of the default display, in degrees (0, 90, 180 or 270).
 * Unrecognized rotation constants are treated as 0 degrees.
 */
private int getDeviceOrientation() {
  final WindowManager windowManager =
      (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
  final int displayRotation = windowManager.getDefaultDisplay().getRotation();
  // Translate the Surface.ROTATION_* constant into degrees.
  switch (displayRotation) {
    case Surface.ROTATION_90:
      return 90;
    case Surface.ROTATION_180:
      return 180;
    case Surface.ROTATION_270:
      return 270;
    case Surface.ROTATION_0:
    default:
      return 0;
  }
}
private int getFrameOrientation() { private int getFrameOrientation() {
int rotation = getDeviceOrientation(); int rotation = CameraSession.getDeviceOrientation(applicationContext);
if (!isCameraFrontFacing) { if (!isCameraFrontFacing) {
rotation = 360 - rotation; rotation = 360 - rotation;
} }

View File

@@ -10,17 +10,22 @@
package org.webrtc; package org.webrtc;
import android.content.Context;
import android.graphics.Matrix;
import android.view.WindowManager;
import android.view.Surface;
interface CameraSession { interface CameraSession {
enum FailureType { ERROR, DISCONNECTED } enum FailureType { ERROR, DISCONNECTED }
// Callbacks are fired on the camera thread. // Callbacks are fired on the camera thread.
public interface CreateSessionCallback { interface CreateSessionCallback {
void onDone(CameraSession session); void onDone(CameraSession session);
void onFailure(FailureType failureType, String error); void onFailure(FailureType failureType, String error);
} }
// Events are fired on the camera thread. // Events are fired on the camera thread.
public interface Events { interface Events {
void onCameraOpening(); void onCameraOpening();
void onCameraError(CameraSession session, String error); void onCameraError(CameraSession session, String error);
void onCameraDisconnected(CameraSession session); void onCameraDisconnected(CameraSession session);
@@ -33,4 +38,35 @@ interface CameraSession {
* If waitCameraStop is true, also waits for the camera to stop. * If waitCameraStop is true, also waits for the camera to stop.
*/ */
void stop(); void stop();
/**
 * Returns the rotation of the device's default display in degrees (0, 90, 180 or 270).
 * Any unrecognized rotation constant is reported as 0 degrees.
 */
static int getDeviceOrientation(Context context) {
  final WindowManager windowManager =
      (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
  final int displayRotation = windowManager.getDefaultDisplay().getRotation();
  if (displayRotation == Surface.ROTATION_90) {
    return 90;
  }
  if (displayRotation == Surface.ROTATION_180) {
    return 180;
  }
  if (displayRotation == Surface.ROTATION_270) {
    return 270;
  }
  // Surface.ROTATION_0 and any unknown value map to 0 degrees.
  return 0;
}
/**
 * Returns a new texture buffer with an optional horizontal mirror and a rotation (in degrees)
 * baked into its transform matrix. The mirror/rotation is applied around the texture center
 * (0.5, 0.5), so texture coordinates stay within [0, 1].
 */
static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
    TextureBufferImpl buffer, boolean mirror, int rotation) {
  final Matrix mirrorAndRotationMatrix = new Matrix();
  // Move the origin to the texture center so the mirror and rotation pivot around (0.5, 0.5).
  mirrorAndRotationMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f);
  if (mirror) {
    // Flip horizontally to undo the OS-applied front camera mirroring.
    mirrorAndRotationMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f);
  }
  mirrorAndRotationMatrix.preRotate(rotation);
  // Move the origin back to the texture corner.
  mirrorAndRotationMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f);
  // Width and height are unchanged: the caller has already sized the buffer for the
  // post-rotation orientation.
  return buffer.applyTransformMatrix(
      mirrorAndRotationMatrix, buffer.getWidth(), buffer.getHeight());
}
} }