Move camera implementation details away from the public API.
Moves CameraCapturer, CameraSession, Camera1Session and Camera2Session away from the public API.

BUG=webrtc:7172
Review-Url: https://codereview.webrtc.org/2699713004
Cr-Commit-Position: refs/heads/master@{#16723}
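Since the four classes below become package-private, applications reach the camera only through the public CameraVideoCapturer surface. A minimal sketch of that entry point, assuming the CameraEnumerator/Camera1Enumerator API shipped in this SDK; surfaceTextureHelper, appContext and observer are app-provided placeholders, not part of this diff:

// Sketch: the public entry point after this change.
CameraEnumerator enumerator = new Camera1Enumerator(true /* captureToTexture */);
String deviceName = enumerator.getDeviceNames()[0];
CameraVideoCapturer capturer = enumerator.createCapturer(deviceName, null /* eventsHandler */);
capturer.initialize(surfaceTextureHelper, appContext, observer);
capturer.startCapture(1280 /* width */, 720 /* height */, 30 /* framerate */);
// Camera1Session/Camera2Session are created internally by CameraCapturer; callers never touch them.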
webrtc/sdk/android/src/java/org/webrtc/Camera1Session.java (new file, 336 lines)
@@ -0,0 +1,336 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;
import android.view.WindowManager;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

@SuppressWarnings("deprecation")
class Camera1Session implements CameraSession {
  private static final String TAG = "Camera1Session";
  private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;

  private static final Histogram camera1StartTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
  private static final Histogram camera1StopTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
  private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
      "WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());

  private static enum SessionState { RUNNING, STOPPED }

  private final Handler cameraThreadHandler;
  private final Events events;
  private final boolean captureToTexture;
  private final Context applicationContext;
  private final SurfaceTextureHelper surfaceTextureHelper;
  private final int cameraId;
  private final android.hardware.Camera camera;
  private final android.hardware.Camera.CameraInfo info;
  private final CaptureFormat captureFormat;
  // Used only for stats. Only used on the camera thread.
  private final long constructionTimeNs; // Construction time of this class.

  private SessionState state;
  private boolean firstFrameReported = false;

  public static void create(final CreateSessionCallback callback, final Events events,
      final boolean captureToTexture, final Context applicationContext,
      final SurfaceTextureHelper surfaceTextureHelper, final int cameraId, final int width,
      final int height, final int framerate) {
    final long constructionTimeNs = System.nanoTime();
    Logging.d(TAG, "Open camera " + cameraId);
    events.onCameraOpening();

    final android.hardware.Camera camera;
    try {
      camera = android.hardware.Camera.open(cameraId);
    } catch (RuntimeException e) {
      callback.onFailure(FailureType.ERROR, e.getMessage());
      return;
    }

    try {
      camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
    } catch (IOException e) {
      camera.release();
      callback.onFailure(FailureType.ERROR, e.getMessage());
      return;
    }

    final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
    android.hardware.Camera.getCameraInfo(cameraId, info);

    final android.hardware.Camera.Parameters parameters = camera.getParameters();
    final CaptureFormat captureFormat =
        findClosestCaptureFormat(parameters, width, height, framerate);
    final Size pictureSize = findClosestPictureSize(parameters, width, height);

    updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);

    // Initialize the capture buffers.
    if (!captureToTexture) {
      final int frameSize = captureFormat.frameSize();
      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
        camera.addCallbackBuffer(buffer.array());
      }
    }

    // Calculate orientation manually and send it as CVO instead.
    camera.setDisplayOrientation(0 /* degrees */);

    callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
        surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
  }

  private static void updateCameraParameters(android.hardware.Camera camera,
      android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
      boolean captureToTexture) {
    final List<String> focusModes = parameters.getSupportedFocusModes();

    parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
    parameters.setPreviewSize(captureFormat.width, captureFormat.height);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);
    if (!captureToTexture) {
      parameters.setPreviewFormat(captureFormat.imageFormat);
    }

    if (parameters.isVideoStabilizationSupported()) {
      parameters.setVideoStabilization(true);
    }
    if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
      parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    }
    camera.setParameters(parameters);
  }

  private static CaptureFormat findClosestCaptureFormat(
      android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
    // Find closest supported format for |width| x |height| @ |framerate|.
    final List<CaptureFormat.FramerateRange> supportedFramerates =
        Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
    Logging.d(TAG, "Available fps ranges: " + supportedFramerates);

    final CaptureFormat.FramerateRange fpsRange =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);

    final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
        Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
    CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);

    return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
  }

  private static Size findClosestPictureSize(
      android.hardware.Camera.Parameters parameters, int width, int height) {
    return CameraEnumerationAndroid.getClosestSupportedSize(
        Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
  }

  private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
      SurfaceTextureHelper surfaceTextureHelper, int cameraId, android.hardware.Camera camera,
      android.hardware.Camera.CameraInfo info, CaptureFormat captureFormat,
      long constructionTimeNs) {
    Logging.d(TAG, "Create new camera1 session on camera " + cameraId);

    this.cameraThreadHandler = new Handler();
    this.events = events;
    this.captureToTexture = captureToTexture;
    this.applicationContext = applicationContext;
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.cameraId = cameraId;
    this.camera = camera;
    this.info = info;
    this.captureFormat = captureFormat;
    this.constructionTimeNs = constructionTimeNs;

    startCapturing();
  }

  @Override
  public void stop() {
    Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
    checkIsOnCameraThread();
    if (state != SessionState.STOPPED) {
      final long stopStartTime = System.nanoTime();
      stopInternal();
      final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
      camera1StopTimeMsHistogram.addSample(stopTimeMs);
    }
  }

  private void startCapturing() {
    Logging.d(TAG, "Start capturing");
    checkIsOnCameraThread();

    state = SessionState.RUNNING;

    camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() {
      @Override
      public void onError(int error, android.hardware.Camera camera) {
        String errorMessage;
        if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
          errorMessage = "Camera server died!";
        } else {
          errorMessage = "Camera error: " + error;
        }
        Logging.e(TAG, errorMessage);
        stopInternal();
        if (error == android.hardware.Camera.CAMERA_ERROR_EVICTED) {
          events.onCameraDisconnected(Camera1Session.this);
        } else {
          events.onCameraError(Camera1Session.this, errorMessage);
        }
      }
    });

    if (captureToTexture) {
      listenForTextureFrames();
    } else {
      listenForBytebufferFrames();
    }
    try {
      camera.startPreview();
    } catch (RuntimeException e) {
      stopInternal();
      events.onCameraError(this, e.getMessage());
    }
  }

  private void stopInternal() {
    Logging.d(TAG, "Stop internal");
    checkIsOnCameraThread();
    if (state == SessionState.STOPPED) {
      Logging.d(TAG, "Camera is already stopped");
      return;
    }

    state = SessionState.STOPPED;
    surfaceTextureHelper.stopListening();
    // Note: stopPreview or other driver code might deadlock. Deadlock in
    // android.hardware.Camera._stopPreview(Native Method) has been observed on
    // Nexus 5 (hammerhead), OS version LMY48I.
    camera.stopPreview();
    camera.release();
    events.onCameraClosed(this);
    Logging.d(TAG, "Stop done");
  }

  private void listenForTextureFrames() {
    surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
      @Override
      public void onTextureFrameAvailable(
          int oesTextureId, float[] transformMatrix, long timestampNs) {
        checkIsOnCameraThread();

        if (state != SessionState.RUNNING) {
          Logging.d(TAG, "Texture frame captured but camera is no longer running.");
          surfaceTextureHelper.returnTextureFrame();
          return;
        }

        if (!firstFrameReported) {
          final int startTimeMs =
              (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
          camera1StartTimeMsHistogram.addSample(startTimeMs);
          firstFrameReported = true;
        }

        int rotation = getFrameOrientation();
        if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
          // Undo the mirror that the OS "helps" us with.
          // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
          transformMatrix = RendererCommon.multiplyMatrices(
              transformMatrix, RendererCommon.horizontalFlipMatrix());
        }
        events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
            captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
      }
    });
  }

  private void listenForBytebufferFrames() {
    camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
      @Override
      public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
        checkIsOnCameraThread();

        if (callbackCamera != camera) {
          Logging.e(TAG, "Callback from a different camera. This should never happen.");
          return;
        }

        if (state != SessionState.RUNNING) {
          Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
          return;
        }

        final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

        if (!firstFrameReported) {
          final int startTimeMs =
              (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
          camera1StartTimeMsHistogram.addSample(startTimeMs);
          firstFrameReported = true;
        }

        events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
            captureFormat.height, getFrameOrientation(), captureTimeNs);
        camera.addCallbackBuffer(data);
      }
    });
  }

  private int getDeviceOrientation() {
    int orientation = 0;

    WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
    switch (wm.getDefaultDisplay().getRotation()) {
      case Surface.ROTATION_90:
        orientation = 90;
        break;
      case Surface.ROTATION_180:
        orientation = 180;
        break;
      case Surface.ROTATION_270:
        orientation = 270;
        break;
      case Surface.ROTATION_0:
      default:
        orientation = 0;
        break;
    }
    return orientation;
  }

  private int getFrameOrientation() {
    int rotation = getDeviceOrientation();
    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
      rotation = 360 - rotation;
    }
    return (info.orientation + rotation) % 360;
  }

  private void checkIsOnCameraThread() {
    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
      throw new IllegalStateException("Wrong thread");
    }
  }
}
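A worked example of getFrameOrientation() above, with values assumed for illustration: a back-facing sensor mounted at 90 degrees, device rotated to ROTATION_270.

// Sketch: the orientation arithmetic from Camera1Session, with assumed inputs.
int deviceRotation = 270;                      // from getDeviceOrientation()
int rotation = 360 - deviceRotation;           // back-camera compensation -> 90
int frameOrientation = (90 + rotation) % 360;  // sensor orientation 90 -> 180
// The session attaches 180 to the frame instead of rotating pixels, matching the
// "send it as CVO instead" comment in create(): the receiver applies the rotation.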
webrtc/sdk/android/src/java/org/webrtc/Camera2Session.java (new file, 453 lines)
@@ -0,0 +1,453 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.os.Handler;
import android.util.Range;
import android.view.Surface;
import android.view.WindowManager;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

@TargetApi(21)
class Camera2Session implements CameraSession {
  private static final String TAG = "Camera2Session";

  private static final Histogram camera2StartTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
  private static final Histogram camera2StopTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
  private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
      "WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());

  private static enum SessionState { RUNNING, STOPPED }

  private final Handler cameraThreadHandler;
  private final CreateSessionCallback callback;
  private final Events events;
  private final Context applicationContext;
  private final CameraManager cameraManager;
  private final SurfaceTextureHelper surfaceTextureHelper;
  private final String cameraId;
  private final int width;
  private final int height;
  private final int framerate;

  // Initialized at start
  private CameraCharacteristics cameraCharacteristics;
  private int cameraOrientation;
  private boolean isCameraFrontFacing;
  private int fpsUnitFactor;
  private CaptureFormat captureFormat;

  // Initialized when camera opens
  private CameraDevice cameraDevice;
  private Surface surface;

  // Initialized when capture session is created
  private CameraCaptureSession captureSession;

  // State
  private SessionState state = SessionState.RUNNING;
  private boolean firstFrameReported = false;

  // Used only for stats. Only used on the camera thread.
  private final long constructionTimeNs; // Construction time of this class.

  private class CameraStateCallback extends CameraDevice.StateCallback {
    private String getErrorDescription(int errorCode) {
      switch (errorCode) {
        case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
          return "Camera device has encountered a fatal error.";
        case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
          return "Camera device could not be opened due to a device policy.";
        case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
          return "Camera device is in use already.";
        case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
          return "Camera service has encountered a fatal error.";
        case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
          return "Camera device could not be opened because"
              + " there are too many other open camera devices.";
        default:
          return "Unknown camera error: " + errorCode;
      }
    }

    @Override
    public void onDisconnected(CameraDevice camera) {
      checkIsOnCameraThread();
      final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
      state = SessionState.STOPPED;
      stopInternal();
      if (startFailure) {
        callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
      } else {
        events.onCameraDisconnected(Camera2Session.this);
      }
    }

    @Override
    public void onError(CameraDevice camera, int errorCode) {
      checkIsOnCameraThread();
      reportError(getErrorDescription(errorCode));
    }

    @Override
    public void onOpened(CameraDevice camera) {
      checkIsOnCameraThread();

      Logging.d(TAG, "Camera opened.");
      cameraDevice = camera;

      final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
      surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
      surface = new Surface(surfaceTexture);
      try {
        camera.createCaptureSession(
            Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
      } catch (CameraAccessException e) {
        reportError("Failed to create capture session. " + e);
        return;
      }
    }

    @Override
    public void onClosed(CameraDevice camera) {
      checkIsOnCameraThread();

      Logging.d(TAG, "Camera device closed.");
      events.onCameraClosed(Camera2Session.this);
    }
  }

  private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
    @Override
    public void onConfigureFailed(CameraCaptureSession session) {
      checkIsOnCameraThread();
      session.close();
      reportError("Failed to configure capture session.");
    }

    @Override
    public void onConfigured(CameraCaptureSession session) {
      checkIsOnCameraThread();
      Logging.d(TAG, "Camera capture session configured.");
      captureSession = session;
      try {
        /*
         * The viable options for video capture requests are:
         * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
         *   post-processing.
         * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
         *   quality.
         */
        final CaptureRequest.Builder captureRequestBuilder =
            cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        // Set auto exposure fps range.
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
            new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
                captureFormat.framerate.max / fpsUnitFactor));
        captureRequestBuilder.set(
            CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
        chooseStabilizationMode(captureRequestBuilder);
        chooseFocusMode(captureRequestBuilder);

        captureRequestBuilder.addTarget(surface);
        session.setRepeatingRequest(
            captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
      } catch (CameraAccessException e) {
        reportError("Failed to start capture request. " + e);
        return;
      }

      surfaceTextureHelper.startListening(
          new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
            @Override
            public void onTextureFrameAvailable(
                int oesTextureId, float[] transformMatrix, long timestampNs) {
              checkIsOnCameraThread();

              if (state != SessionState.RUNNING) {
                Logging.d(TAG, "Texture frame captured but camera is no longer running.");
                surfaceTextureHelper.returnTextureFrame();
                return;
              }

              if (!firstFrameReported) {
                firstFrameReported = true;
                final int startTimeMs =
                    (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
                camera2StartTimeMsHistogram.addSample(startTimeMs);
              }

              int rotation = getFrameOrientation();
              if (isCameraFrontFacing) {
                // Undo the mirror that the OS "helps" us with.
                // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
                transformMatrix = RendererCommon.multiplyMatrices(
                    transformMatrix, RendererCommon.horizontalFlipMatrix());
              }

              // Undo camera orientation - we report it as rotation instead.
              transformMatrix =
                  RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);

              events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
                  captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
            }
          });
      Logging.d(TAG, "Camera device successfully started.");
      callback.onDone(Camera2Session.this);
    }

    // Prefers optical stabilization over software stabilization if available. Only enables one of
    // the stabilization modes at a time because having both enabled can cause strange results.
    private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
      final int[] availableOpticalStabilization = cameraCharacteristics.get(
          CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
      if (availableOpticalStabilization != null) {
        for (int mode : availableOpticalStabilization) {
          if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
            captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
            captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
            Logging.d(TAG, "Using optical stabilization.");
            return;
          }
        }
      }
      // If no optical mode is available, try software.
      final int[] availableVideoStabilization = cameraCharacteristics.get(
          CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
      for (int mode : availableVideoStabilization) {
        if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
          captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
              CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
          captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
              CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
          Logging.d(TAG, "Using video stabilization.");
          return;
        }
      }
      Logging.d(TAG, "Stabilization not available.");
    }

    private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
      final int[] availableFocusModes =
          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
      for (int mode : availableFocusModes) {
        if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
          captureRequestBuilder.set(
              CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
          Logging.d(TAG, "Using continuous video auto-focus.");
          return;
        }
      }
      Logging.d(TAG, "Auto-focus is not available.");
    }
  }

  private class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
    @Override
    public void onCaptureFailed(
        CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
      Logging.d(TAG, "Capture failed: " + failure);
    }
  }

  public static void create(CreateSessionCallback callback, Events events,
      Context applicationContext, CameraManager cameraManager,
      SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
      int framerate) {
    new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
        cameraId, width, height, framerate);
  }

  private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
      int width, int height, int framerate) {
    Logging.d(TAG, "Create new camera2 session on camera " + cameraId);

    constructionTimeNs = System.nanoTime();

    this.cameraThreadHandler = new Handler();
    this.callback = callback;
    this.events = events;
    this.applicationContext = applicationContext;
    this.cameraManager = cameraManager;
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.cameraId = cameraId;
    this.width = width;
    this.height = height;
    this.framerate = framerate;

    start();
  }

  private void start() {
    checkIsOnCameraThread();
    Logging.d(TAG, "start");

    try {
      cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
    } catch (final CameraAccessException e) {
      reportError("getCameraCharacteristics(): " + e.getMessage());
      return;
    }
    cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
        == CameraMetadata.LENS_FACING_FRONT;

    findCaptureFormat();
    openCamera();
  }

  private void findCaptureFormat() {
    checkIsOnCameraThread();

    Range<Integer>[] fpsRanges =
        cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
    List<CaptureFormat.FramerateRange> framerateRanges =
        Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
    List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
    Logging.d(TAG, "Available preview sizes: " + sizes);
    Logging.d(TAG, "Available fps ranges: " + framerateRanges);

    if (framerateRanges.isEmpty() || sizes.isEmpty()) {
      reportError("No supported capture formats.");
      return;
    }

    final CaptureFormat.FramerateRange bestFpsRange =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);

    final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
    CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);

    captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
    Logging.d(TAG, "Using capture format: " + captureFormat);
  }

  private void openCamera() {
    checkIsOnCameraThread();

    Logging.d(TAG, "Opening camera " + cameraId);
    events.onCameraOpening();

    try {
      cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
    } catch (CameraAccessException e) {
      reportError("Failed to open camera: " + e);
      return;
    }
  }

  @Override
  public void stop() {
    Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
    checkIsOnCameraThread();
    if (state != SessionState.STOPPED) {
      final long stopStartTime = System.nanoTime();
      state = SessionState.STOPPED;
      stopInternal();
      final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
      camera2StopTimeMsHistogram.addSample(stopTimeMs);
    }
  }

  private void stopInternal() {
    Logging.d(TAG, "Stop internal");
    checkIsOnCameraThread();

    surfaceTextureHelper.stopListening();

    if (captureSession != null) {
      captureSession.close();
      captureSession = null;
    }
    if (surface != null) {
      surface.release();
      surface = null;
    }
    if (cameraDevice != null) {
      cameraDevice.close();
      cameraDevice = null;
    }

    Logging.d(TAG, "Stop done");
  }

  private void reportError(String error) {
    checkIsOnCameraThread();
    Logging.e(TAG, "Error: " + error);

    final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
    state = SessionState.STOPPED;
    stopInternal();
    if (startFailure) {
      callback.onFailure(FailureType.ERROR, error);
    } else {
      events.onCameraError(this, error);
    }
  }

  private int getDeviceOrientation() {
    int orientation = 0;

    WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
    switch (wm.getDefaultDisplay().getRotation()) {
      case Surface.ROTATION_90:
        orientation = 90;
        break;
      case Surface.ROTATION_180:
        orientation = 180;
        break;
      case Surface.ROTATION_270:
        orientation = 270;
        break;
      case Surface.ROTATION_0:
      default:
        orientation = 0;
        break;
    }
    return orientation;
  }

  private int getFrameOrientation() {
    int rotation = getDeviceOrientation();
    if (!isCameraFrontFacing) {
      rotation = 360 - rotation;
    }
    return (cameraOrientation + rotation) % 360;
  }

  private void checkIsOnCameraThread() {
    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
      throw new IllegalStateException("Wrong thread");
    }
  }
}
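A note on the fpsUnitFactor division in onConfigured() above: some camera HALs report CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES in units of 1/1000 fps (e.g. [15000, 30000] meaning 15-30 fps), others in whole fps. The sketch below shows the idea only; it is an assumed simplification, not Camera2Enumerator.getFpsUnitFactor's exact code:

// Illustrative heuristic (assumption): ranges reported in 1/1000-fps units are
// divided back down to whole fps before being handed to the request builder.
static int guessFpsUnitFactor(int rangeUpper) {
  return rangeUpper >= 1000 ? 1000 : 1;
}
// guessFpsUnitFactor(30000) == 1000, so 30000 / 1000 = 30 fps ends up in
// CONTROL_AE_TARGET_FPS_RANGE; guessFpsUnitFactor(30) == 1 leaves plain fps as-is.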
webrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java (new file, 449 lines)
@@ -0,0 +1,449 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import java.util.Arrays;

@SuppressWarnings("deprecation")
abstract class CameraCapturer implements CameraVideoCapturer {
  enum SwitchState {
    IDLE, // No switch requested.
    PENDING, // Waiting for previous capture session to open.
    IN_PROGRESS, // Waiting for new switched capture session to start.
  }

  private static final String TAG = "CameraCapturer";
  private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
  private final static int OPEN_CAMERA_DELAY_MS = 500;
  private final static int OPEN_CAMERA_TIMEOUT = 10000;

  private final CameraEnumerator cameraEnumerator;
  private final CameraEventsHandler eventsHandler;
  private final Handler uiThreadHandler;

  private final CameraSession.CreateSessionCallback createSessionCallback =
      new CameraSession.CreateSessionCallback() {
        @Override
        public void onDone(CameraSession session) {
          checkIsOnCameraThread();
          Logging.d(TAG, "Create session done");
          uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
          synchronized (stateLock) {
            capturerObserver.onCapturerStarted(true /* success */);
            sessionOpening = false;
            currentSession = session;
            cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
            firstFrameObserved = false;
            stateLock.notifyAll();

            if (switchState == SwitchState.IN_PROGRESS) {
              if (switchEventsHandler != null) {
                switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
                switchEventsHandler = null;
              }
              switchState = SwitchState.IDLE;
            } else if (switchState == SwitchState.PENDING) {
              switchState = SwitchState.IDLE;
              switchCameraInternal(switchEventsHandler);
            }
          }
        }

        @Override
        public void onFailure(CameraSession.FailureType failureType, String error) {
          checkIsOnCameraThread();
          uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
          synchronized (stateLock) {
            capturerObserver.onCapturerStarted(false /* success */);
            openAttemptsRemaining--;

            if (openAttemptsRemaining <= 0) {
              Logging.w(TAG, "Opening camera failed, passing: " + error);
              sessionOpening = false;
              stateLock.notifyAll();

              if (switchState != SwitchState.IDLE) {
                if (switchEventsHandler != null) {
                  switchEventsHandler.onCameraSwitchError(error);
                  switchEventsHandler = null;
                }
                switchState = SwitchState.IDLE;
              }

              if (failureType == CameraSession.FailureType.DISCONNECTED) {
                eventsHandler.onCameraDisconnected();
              } else {
                eventsHandler.onCameraError(error);
              }
            } else {
              Logging.w(TAG, "Opening camera failed, retry: " + error);

              createSessionInternal(OPEN_CAMERA_DELAY_MS);
            }
          }
        }
      };

  private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
    @Override
    public void onCameraOpening() {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (currentSession != null) {
          Logging.w(TAG, "onCameraOpening while session was open.");
          return;
        }
        eventsHandler.onCameraOpening(cameraName);
      }
    }

    @Override
    public void onCameraError(CameraSession session, String error) {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (session != currentSession) {
          Logging.w(TAG, "onCameraError from another session: " + error);
          return;
        }
        eventsHandler.onCameraError(error);
        stopCapture();
      }
    }

    @Override
    public void onCameraDisconnected(CameraSession session) {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (session != currentSession) {
          Logging.w(TAG, "onCameraDisconnected from another session.");
          return;
        }
        eventsHandler.onCameraDisconnected();
        stopCapture();
      }
    }

    @Override
    public void onCameraClosed(CameraSession session) {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (session != currentSession && currentSession != null) {
          Logging.d(TAG, "onCameraClosed from another session.");
          return;
        }
        eventsHandler.onCameraClosed();
      }
    }

    @Override
    public void onByteBufferFrameCaptured(
        CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (session != currentSession) {
          Logging.w(TAG, "onByteBufferFrameCaptured from another session.");
          return;
        }
        if (!firstFrameObserved) {
          eventsHandler.onFirstFrameAvailable();
          firstFrameObserved = true;
        }
        cameraStatistics.addFrame();
        capturerObserver.onByteBufferFrameCaptured(data, width, height, rotation, timestamp);
      }
    }

    @Override
    public void onTextureFrameCaptured(CameraSession session, int width, int height,
        int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (session != currentSession) {
          Logging.w(TAG, "onTextureFrameCaptured from another session.");
          surfaceHelper.returnTextureFrame();
          return;
        }
        if (!firstFrameObserved) {
          eventsHandler.onFirstFrameAvailable();
          firstFrameObserved = true;
        }
        cameraStatistics.addFrame();
        capturerObserver.onTextureFrameCaptured(
            width, height, oesTextureId, transformMatrix, rotation, timestamp);
      }
    }
  };

  private final Runnable openCameraTimeoutRunnable = new Runnable() {
    @Override
    public void run() {
      eventsHandler.onCameraError("Camera failed to start within timeout.");
    }
  };

  // Initialized on initialize
  // -------------------------
  private Handler cameraThreadHandler;
  private Context applicationContext;
  private CapturerObserver capturerObserver;
  private SurfaceTextureHelper surfaceHelper;

  private final Object stateLock = new Object();
  private boolean sessionOpening; /* guarded by stateLock */
  private CameraSession currentSession; /* guarded by stateLock */
  private String cameraName; /* guarded by stateLock */
  private int width; /* guarded by stateLock */
  private int height; /* guarded by stateLock */
  private int framerate; /* guarded by stateLock */
  private int openAttemptsRemaining; /* guarded by stateLock */
  private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
  private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
  // Valid from onDone call until stopCapture, otherwise null.
  private CameraStatistics cameraStatistics; /* guarded by stateLock */
  private boolean firstFrameObserved; /* guarded by stateLock */

  public CameraCapturer(
      String cameraName, CameraEventsHandler eventsHandler, CameraEnumerator cameraEnumerator) {
    if (eventsHandler == null) {
      eventsHandler = new CameraEventsHandler() {
        @Override
        public void onCameraError(String errorDescription) {}
        @Override
        public void onCameraDisconnected() {}
        @Override
        public void onCameraFreezed(String errorDescription) {}
        @Override
        public void onCameraOpening(String cameraName) {}
        @Override
        public void onFirstFrameAvailable() {}
        @Override
        public void onCameraClosed() {}
      };
    }

    this.eventsHandler = eventsHandler;
    this.cameraEnumerator = cameraEnumerator;
    this.cameraName = cameraName;
    uiThreadHandler = new Handler(Looper.getMainLooper());

    final String[] deviceNames = cameraEnumerator.getDeviceNames();

    if (deviceNames.length == 0) {
      throw new RuntimeException("No cameras attached.");
    }
    if (!Arrays.asList(deviceNames).contains(this.cameraName)) {
      throw new IllegalArgumentException(
          "Camera name " + this.cameraName + " does not match any known camera device.");
    }
  }

  @Override
  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      CapturerObserver capturerObserver) {
    this.applicationContext = applicationContext;
    this.capturerObserver = capturerObserver;
    this.surfaceHelper = surfaceTextureHelper;
    this.cameraThreadHandler =
        surfaceTextureHelper == null ? null : surfaceTextureHelper.getHandler();
  }

  @Override
  public void startCapture(int width, int height, int framerate) {
    Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
    if (applicationContext == null) {
      throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
    }

    synchronized (stateLock) {
      if (sessionOpening || currentSession != null) {
        Logging.w(TAG, "Session already open");
        return;
      }

      this.width = width;
      this.height = height;
      this.framerate = framerate;

      sessionOpening = true;
      openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
      createSessionInternal(0);
    }
  }

  private void createSessionInternal(int delayMs) {
    uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
    cameraThreadHandler.postDelayed(new Runnable() {
      @Override
      public void run() {
        createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
            surfaceHelper, cameraName, width, height, framerate);
      }
    }, delayMs);
  }

  @Override
  public void stopCapture() {
    Logging.d(TAG, "Stop capture");

    synchronized (stateLock) {
      while (sessionOpening) {
        Logging.d(TAG, "Stop capture: Waiting for session to open");
        ThreadUtils.waitUninterruptibly(stateLock);
      }

      if (currentSession != null) {
        Logging.d(TAG, "Stop capture: Nulling session");
        cameraStatistics.release();
        cameraStatistics = null;
        final CameraSession oldSession = currentSession;
        cameraThreadHandler.post(new Runnable() {
          @Override
          public void run() {
            oldSession.stop();
          }
        });
        currentSession = null;
        capturerObserver.onCapturerStopped();
      } else {
        Logging.d(TAG, "Stop capture: No session open");
      }
    }

    Logging.d(TAG, "Stop capture done");
  }

  @Override
  public void changeCaptureFormat(int width, int height, int framerate) {
    Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
    synchronized (stateLock) {
      stopCapture();
      startCapture(width, height, framerate);
    }
  }

  @Override
  public void dispose() {
    Logging.d(TAG, "dispose");
    stopCapture();
  }

  @Override
  public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
    Logging.d(TAG, "switchCamera");
    cameraThreadHandler.post(new Runnable() {
      @Override
      public void run() {
        switchCameraInternal(switchEventsHandler);
      }
    });
  }

  @Override
  public boolean isScreencast() {
    return false;
  }

  public void printStackTrace() {
    Thread cameraThread = null;
    if (cameraThreadHandler != null) {
      cameraThread = cameraThreadHandler.getLooper().getThread();
    }
    if (cameraThread != null) {
      StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
      if (cameraStackTrace.length > 0) {
        Logging.d(TAG, "CameraCapturer stack trace:");
        for (StackTraceElement traceElem : cameraStackTrace) {
          Logging.d(TAG, traceElem.toString());
        }
      }
    }
  }

  private void switchCameraInternal(final CameraSwitchHandler switchEventsHandler) {
    Logging.d(TAG, "switchCamera internal");

    final String[] deviceNames = cameraEnumerator.getDeviceNames();

    if (deviceNames.length < 2) {
      if (switchEventsHandler != null) {
        switchEventsHandler.onCameraSwitchError("No camera to switch to.");
      }
      return;
    }

    synchronized (stateLock) {
      if (switchState != SwitchState.IDLE) {
        Logging.d(TAG, "switchCamera switchInProgress");
        if (switchEventsHandler != null) {
          switchEventsHandler.onCameraSwitchError("Camera switch already in progress.");
        }
        return;
      }

      if (!sessionOpening && currentSession == null) {
        Logging.d(TAG, "switchCamera: No session open");
        if (switchEventsHandler != null) {
          switchEventsHandler.onCameraSwitchError("Camera is not running.");
        }
        return;
      }

      this.switchEventsHandler = switchEventsHandler;
      if (sessionOpening) {
        switchState = SwitchState.PENDING;
        return;
      } else {
        switchState = SwitchState.IN_PROGRESS;
      }

      Logging.d(TAG, "switchCamera: Stopping session");
      cameraStatistics.release();
      cameraStatistics = null;
      final CameraSession oldSession = currentSession;
      cameraThreadHandler.post(new Runnable() {
        @Override
        public void run() {
          oldSession.stop();
        }
      });
      currentSession = null;

      int cameraNameIndex = Arrays.asList(deviceNames).indexOf(cameraName);
      cameraName = deviceNames[(cameraNameIndex + 1) % deviceNames.length];

      sessionOpening = true;
      openAttemptsRemaining = 1;
      createSessionInternal(0);
    }
    Logging.d(TAG, "switchCamera done");
  }

  private void checkIsOnCameraThread() {
    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
      Logging.e(TAG, "Check is on camera thread failed.");
      throw new RuntimeException("Not on camera thread.");
    }
  }

  protected String getCameraName() {
    synchronized (stateLock) {
      return cameraName;
    }
  }

  abstract protected void createCameraSession(
      CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
      int width, int height, int framerate);
}
webrtc/sdk/android/src/java/org/webrtc/CameraSession.java (new file, 39 lines)
@@ -0,0 +1,39 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

interface CameraSession {
  enum FailureType { ERROR, DISCONNECTED }

  // Callbacks are fired on the camera thread.
  public interface CreateSessionCallback {
    void onDone(CameraSession session);
    void onFailure(FailureType failureType, String error);
  }

  // Events are fired on the camera thread.
  public interface Events {
    void onCameraOpening();
    void onCameraError(CameraSession session, String error);
    void onCameraDisconnected(CameraSession session);
    void onCameraClosed(CameraSession session);
    void onByteBufferFrameCaptured(
        CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
    void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
        float[] transformMatrix, int rotation, long timestamp);
  }

  /**
   * Stops the capture. Waits until no more calls to the capture observer will be made.
   */
  void stop();
}
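The interface above is the entire contract CameraCapturer relies on. A hypothetical stub (not part of the commit) makes the expectations concrete: callbacks fire on the camera thread, and stop() is guarded so a second call is a no-op, mirroring the RUNNING/STOPPED state in both real sessions.

// Hypothetical test stub, for illustration only.
class FakeCameraSession implements CameraSession {
  private final Events events;
  private boolean stopped = false;

  FakeCameraSession(CreateSessionCallback callback, Events events) {
    this.events = events;
    events.onCameraOpening();
    callback.onDone(this); // A real session would open hardware before reporting done.
  }

  @Override
  public void stop() {
    if (!stopped) { // Mirror the RUNNING/STOPPED guard used by the real sessions.
      stopped = true;
      events.onCameraClosed(this);
    }
  }
}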