Combine webrtc/api/java/android and webrtc/api/java/src.
It used to be that there was a Java api for devices not running Android but that is no longer the case. I combined the directories and made the folder structure chromium style. BUG=webrtc:6067 R=magjed@webrtc.org, tommi@webrtc.org Review URL: https://codereview.webrtc.org/2111823002 . Cr-Commit-Position: refs/heads/master@{#13356}
This commit is contained in:
@ -1,172 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
|
||||
|
||||
import android.os.SystemClock;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public class Camera1Enumerator implements CameraEnumerator {
|
||||
private final static String TAG = "Camera1Enumerator";
|
||||
// Each entry contains the supported formats for corresponding camera index. The formats for all
|
||||
// cameras are enumerated on the first call to getSupportedFormats(), and cached for future
|
||||
// reference.
|
||||
private static List<List<CaptureFormat>> cachedSupportedFormats;
|
||||
|
||||
private final boolean captureToTexture;
|
||||
|
||||
public Camera1Enumerator() {
|
||||
this(true /* captureToTexture */);
|
||||
}
|
||||
|
||||
public Camera1Enumerator(boolean captureToTexture) {
|
||||
this.captureToTexture = captureToTexture;
|
||||
}
|
||||
|
||||
// Returns device names that can be used to create a new VideoCapturerAndroid.
|
||||
@Override
|
||||
public String[] getDeviceNames() {
|
||||
String[] names = new String[android.hardware.Camera.getNumberOfCameras()];
|
||||
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
|
||||
names[i] = getDeviceName(i);
|
||||
}
|
||||
return names;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isFrontFacing(String deviceName) {
|
||||
android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
|
||||
return info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isBackFacing(String deviceName) {
|
||||
android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
|
||||
return info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CameraVideoCapturer createCapturer(String deviceName,
|
||||
CameraVideoCapturer.CameraEventsHandler eventsHandler) {
|
||||
return new VideoCapturerAndroid(deviceName, eventsHandler, captureToTexture);
|
||||
}
|
||||
|
||||
private static android.hardware.Camera.CameraInfo getCameraInfo(int index) {
|
||||
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
|
||||
try {
|
||||
android.hardware.Camera.getCameraInfo(index, info);
|
||||
} catch (Exception e) {
|
||||
Logging.e(TAG, "getCameraInfo failed on index " + index,e);
|
||||
return null;
|
||||
}
|
||||
return info;
|
||||
}
|
||||
|
||||
static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
|
||||
if (cachedSupportedFormats == null) {
|
||||
cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
|
||||
for (int i = 0; i < CameraEnumerationAndroid.getDeviceCount(); ++i) {
|
||||
cachedSupportedFormats.add(enumerateFormats(i));
|
||||
}
|
||||
}
|
||||
return cachedSupportedFormats.get(cameraId);
|
||||
}
|
||||
|
||||
private static List<CaptureFormat> enumerateFormats(int cameraId) {
|
||||
Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
|
||||
final long startTimeMs = SystemClock.elapsedRealtime();
|
||||
final android.hardware.Camera.Parameters parameters;
|
||||
android.hardware.Camera camera = null;
|
||||
try {
|
||||
Logging.d(TAG, "Opening camera with index " + cameraId);
|
||||
camera = android.hardware.Camera.open(cameraId);
|
||||
parameters = camera.getParameters();
|
||||
} catch (RuntimeException e) {
|
||||
Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
|
||||
return new ArrayList<CaptureFormat>();
|
||||
} finally {
|
||||
if (camera != null) {
|
||||
camera.release();
|
||||
}
|
||||
}
|
||||
|
||||
final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
|
||||
try {
|
||||
int minFps = 0;
|
||||
int maxFps = 0;
|
||||
final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
|
||||
if (listFpsRange != null) {
|
||||
// getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
|
||||
// corresponding to the highest fps.
|
||||
final int[] range = listFpsRange.get(listFpsRange.size() - 1);
|
||||
minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
|
||||
maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
|
||||
}
|
||||
for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
|
||||
formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
|
||||
}
|
||||
|
||||
final long endTimeMs = SystemClock.elapsedRealtime();
|
||||
Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
|
||||
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
|
||||
return formatList;
|
||||
}
|
||||
|
||||
// Convert from android.hardware.Camera.Size to Size.
|
||||
static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
|
||||
final List<Size> sizes = new ArrayList<Size>();
|
||||
for (android.hardware.Camera.Size size : cameraSizes) {
|
||||
sizes.add(new Size(size.width, size.height));
|
||||
}
|
||||
return sizes;
|
||||
}
|
||||
|
||||
// Convert from int[2] to CaptureFormat.FramerateRange.
|
||||
static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
|
||||
final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
|
||||
for (int[] range : arrayRanges) {
|
||||
ranges.add(new CaptureFormat.FramerateRange(
|
||||
range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
|
||||
range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
|
||||
}
|
||||
return ranges;
|
||||
}
|
||||
|
||||
// Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException
|
||||
// if no such camera can be found.
|
||||
static int getCameraIndex(String deviceName) {
|
||||
Logging.d(TAG, "getCameraIndex: " + deviceName);
|
||||
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
|
||||
if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException("No such camera: " + deviceName);
|
||||
}
|
||||
|
||||
// Returns the name of the camera with camera index. Returns null if the
|
||||
// camera can not be used.
|
||||
static String getDeviceName(int index) {
|
||||
android.hardware.Camera.CameraInfo info = getCameraInfo(index);
|
||||
|
||||
String facing =
|
||||
(info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
|
||||
return "Camera " + index + ", Facing " + facing
|
||||
+ ", Orientation " + info.orientation;
|
||||
}
|
||||
}
|
||||
@ -1,926 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.content.Context;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.hardware.camera2.CameraAccessException;
|
||||
import android.hardware.camera2.CameraCaptureSession;
|
||||
import android.hardware.camera2.CameraCharacteristics;
|
||||
import android.hardware.camera2.CameraDevice;
|
||||
import android.hardware.camera2.CameraManager;
|
||||
import android.hardware.camera2.CameraMetadata;
|
||||
import android.hardware.camera2.CaptureFailure;
|
||||
import android.hardware.camera2.CaptureRequest;
|
||||
import android.hardware.camera2.TotalCaptureResult;
|
||||
import android.hardware.camera2.params.StreamConfigurationMap;
|
||||
import android.os.Build;
|
||||
import android.os.Handler;
|
||||
import android.os.SystemClock;
|
||||
import android.util.Range;
|
||||
import android.view.Surface;
|
||||
import android.view.WindowManager;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.Semaphore;
|
||||
|
||||
@TargetApi(21)
|
||||
public class Camera2Capturer implements
|
||||
CameraVideoCapturer,
|
||||
SurfaceTextureHelper.OnTextureFrameAvailableListener {
|
||||
private final static String TAG = "Camera2Capturer";
|
||||
|
||||
private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
|
||||
private final static int OPEN_CAMERA_DELAY_MS = 500;
|
||||
private final static int STOP_TIMEOUT = 10000;
|
||||
private final static int START_TIMEOUT = 10000;
|
||||
private final static Object STOP_TIMEOUT_RUNNABLE_TOKEN = new Object();
|
||||
|
||||
// In the Camera2 API, starting a camera is inherently asynchronous, and this state is
|
||||
// represented with 'STARTING'. Stopping is also asynchronous and this state is 'STOPPING'.
|
||||
private static enum CameraState { IDLE, STARTING, RUNNING, STOPPING }
|
||||
|
||||
// Thread safe objects.
|
||||
// --------------------
|
||||
private final CameraManager cameraManager;
|
||||
private final CameraEventsHandler eventsHandler;
|
||||
|
||||
|
||||
// Shared state - guarded by cameraStateLock. Will only be edited from camera thread (when it is
|
||||
// running).
|
||||
// ---------------------------------------------------------------------------------------------
|
||||
private final Object cameraStateLock = new Object();
|
||||
private CameraState cameraState = CameraState.IDLE;
|
||||
// |cameraThreadHandler| must be synchronized on |cameraStateLock| when not on the camera thread,
|
||||
// or when modifying the reference. Use postOnCameraThread() instead of posting directly to
|
||||
// the handler - this way all callbacks with a specifed token can be removed at once.
|
||||
// |cameraThreadHandler| must be null if and only if CameraState is IDLE.
|
||||
private Handler cameraThreadHandler;
|
||||
// Remember the requested format in case we want to switch cameras.
|
||||
private int requestedWidth;
|
||||
private int requestedHeight;
|
||||
private int requestedFramerate;
|
||||
|
||||
// Will only be edited while camera state is IDLE and cameraStateLock is acquired.
|
||||
private String cameraName;
|
||||
private boolean isFrontCamera;
|
||||
private int cameraOrientation;
|
||||
|
||||
// Semaphore for allowing only one switch at a time.
|
||||
private final Semaphore pendingCameraSwitchSemaphore = new Semaphore(1);
|
||||
// Guarded by pendingCameraSwitchSemaphore
|
||||
private CameraSwitchHandler switchEventsHandler;
|
||||
|
||||
// Internal state - must only be modified from camera thread
|
||||
// ---------------------------------------------------------
|
||||
private CaptureFormat captureFormat;
|
||||
private Context applicationContext;
|
||||
private CapturerObserver capturerObserver;
|
||||
private CameraStatistics cameraStatistics;
|
||||
private SurfaceTextureHelper surfaceTextureHelper;
|
||||
private CameraCaptureSession captureSession;
|
||||
private Surface surface;
|
||||
private CameraDevice cameraDevice;
|
||||
private CameraStateCallback cameraStateCallback;
|
||||
|
||||
// Factor to convert between Android framerates and CaptureFormat.FramerateRange. It will be
|
||||
// either 1 or 1000.
|
||||
private int fpsUnitFactor;
|
||||
private boolean firstFrameReported;
|
||||
private int consecutiveCameraOpenFailures;
|
||||
|
||||
public Camera2Capturer(
|
||||
Context context, String cameraName, CameraEventsHandler eventsHandler) {
|
||||
Logging.d(TAG, "Camera2Capturer ctor, camera name: " + cameraName);
|
||||
this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
|
||||
this.eventsHandler = eventsHandler;
|
||||
|
||||
setCameraName(cameraName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method for checking method is executed on camera thread. Also allows calls from other
|
||||
* threads if camera is closed.
|
||||
*/
|
||||
private void checkIsOnCameraThread() {
|
||||
if (cameraState == CameraState.IDLE) {
|
||||
return;
|
||||
}
|
||||
|
||||
checkIsStrictlyOnCameraThread();
|
||||
}
|
||||
|
||||
/**
|
||||
* Like checkIsOnCameraThread but doesn't allow the camera to be stopped.
|
||||
*/
|
||||
private void checkIsStrictlyOnCameraThread() {
|
||||
if (cameraThreadHandler == null) {
|
||||
throw new IllegalStateException("Camera is closed.");
|
||||
}
|
||||
|
||||
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
|
||||
throw new IllegalStateException("Wrong thread");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks method is not invoked on the camera thread. Used in functions waiting for the camera
|
||||
* state to change since executing them on the camera thread would cause a deadlock.
|
||||
*/
|
||||
private void checkNotOnCameraThread() {
|
||||
if (cameraThreadHandler == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (Thread.currentThread() == cameraThreadHandler.getLooper().getThread()) {
|
||||
throw new IllegalStateException(
|
||||
"Method waiting for camera state to change executed on camera thread");
|
||||
}
|
||||
}
|
||||
|
||||
private void waitForCameraToExitTransitionalState(
|
||||
CameraState transitionalState, long timeoutMs) {
|
||||
checkNotOnCameraThread();
|
||||
|
||||
// We probably should already have the lock when this is called but acquire it in case
|
||||
// we don't have it.
|
||||
synchronized (cameraStateLock) {
|
||||
long timeoutAt = SystemClock.uptimeMillis() + timeoutMs;
|
||||
|
||||
while (cameraState == transitionalState) {
|
||||
Logging.d(TAG, "waitForCameraToExitTransitionalState waiting: "
|
||||
+ cameraState);
|
||||
|
||||
long timeLeft = timeoutAt - SystemClock.uptimeMillis();
|
||||
|
||||
if (timeLeft <= 0) {
|
||||
Logging.e(TAG, "Camera failed to exit transitional state " + transitionalState
|
||||
+ " within the time limit.");
|
||||
break;
|
||||
}
|
||||
|
||||
try {
|
||||
cameraStateLock.wait(timeLeft);
|
||||
} catch (InterruptedException e) {
|
||||
Logging.w(TAG, "Trying to interrupt while waiting to exit transitional state "
|
||||
+ transitionalState + ", ignoring: " + e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Waits until camera state is not STOPPING.
|
||||
*/
|
||||
private void waitForCameraToStopIfStopping() {
|
||||
waitForCameraToExitTransitionalState(CameraState.STOPPING, STOP_TIMEOUT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait until camera state is not STARTING.
|
||||
*/
|
||||
private void waitForCameraToStartIfStarting() {
|
||||
waitForCameraToExitTransitionalState(CameraState.STARTING, START_TIMEOUT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the name of the camera. Camera must be stopped or stopping when this is called.
|
||||
*/
|
||||
private void setCameraName(String cameraName) {
|
||||
final CameraCharacteristics characteristics;
|
||||
try {
|
||||
final String[] cameraIds = cameraManager.getCameraIdList();
|
||||
|
||||
if (cameraName.isEmpty() && cameraIds.length != 0) {
|
||||
cameraName = cameraIds[0];
|
||||
}
|
||||
|
||||
if (!Arrays.asList(cameraIds).contains(cameraName)) {
|
||||
throw new IllegalArgumentException(
|
||||
"Camera name: " + cameraName + " does not match any known camera device:");
|
||||
}
|
||||
|
||||
characteristics = cameraManager.getCameraCharacteristics(cameraName);
|
||||
} catch (CameraAccessException e) {
|
||||
throw new RuntimeException("Camera access exception: " + e);
|
||||
}
|
||||
|
||||
synchronized (cameraStateLock) {
|
||||
waitForCameraToStopIfStopping();
|
||||
|
||||
if (cameraState != CameraState.IDLE) {
|
||||
throw new RuntimeException("Changing camera name on running camera.");
|
||||
}
|
||||
|
||||
// Note: Usually changing camera state from outside camera thread is not allowed. It is
|
||||
// allowed here because camera is not running.
|
||||
this.cameraName = cameraName;
|
||||
isFrontCamera = characteristics.get(CameraCharacteristics.LENS_FACING)
|
||||
== CameraMetadata.LENS_FACING_FRONT;
|
||||
|
||||
/*
|
||||
* Clockwise angle through which the output image needs to be rotated to be upright on the
|
||||
* device screen in its native orientation.
|
||||
* Also defines the direction of rolling shutter readout, which is from top to bottom in the
|
||||
* sensor's coordinate system.
|
||||
* Units: Degrees of clockwise rotation; always a multiple of 90
|
||||
*/
|
||||
cameraOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers appropriate error handlers based on the camera state. Must be called on the camera
|
||||
* thread and camera must not be stopped.
|
||||
*/
|
||||
private void reportError(String errorDescription) {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
Logging.e(TAG, "Error in camera at state " + cameraState + ": " + errorDescription);
|
||||
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError(errorDescription);
|
||||
switchEventsHandler = null;
|
||||
pendingCameraSwitchSemaphore.release();
|
||||
}
|
||||
|
||||
switch (cameraState) {
|
||||
case STARTING:
|
||||
capturerObserver.onCapturerStarted(false /* success */);
|
||||
// fall through
|
||||
case RUNNING:
|
||||
if (eventsHandler != null) {
|
||||
eventsHandler.onCameraError(errorDescription);
|
||||
}
|
||||
break;
|
||||
case STOPPING:
|
||||
setCameraState(CameraState.IDLE);
|
||||
Logging.e(TAG, "Closing camera failed: " + errorDescription);
|
||||
return; // We don't want to call closeAndRelease in this case.
|
||||
default:
|
||||
throw new RuntimeException("Unknown camera state: " + cameraState);
|
||||
}
|
||||
closeAndRelease();
|
||||
}
|
||||
|
||||
private void closeAndRelease() {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
|
||||
Logging.d(TAG, "Close and release.");
|
||||
setCameraState(CameraState.STOPPING);
|
||||
|
||||
// Remove all pending Runnables posted from |this|.
|
||||
cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
|
||||
applicationContext = null;
|
||||
capturerObserver = null;
|
||||
if (cameraStatistics != null) {
|
||||
cameraStatistics.release();
|
||||
cameraStatistics = null;
|
||||
}
|
||||
if (surfaceTextureHelper != null) {
|
||||
surfaceTextureHelper.stopListening();
|
||||
surfaceTextureHelper = null;
|
||||
}
|
||||
if (captureSession != null) {
|
||||
captureSession.close();
|
||||
captureSession = null;
|
||||
}
|
||||
if (surface != null) {
|
||||
surface.release();
|
||||
surface = null;
|
||||
}
|
||||
if (cameraDevice != null) {
|
||||
// Add a timeout for stopping the camera.
|
||||
cameraThreadHandler.postAtTime(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
Logging.e(TAG, "Camera failed to stop within the timeout. Force stopping.");
|
||||
setCameraState(CameraState.IDLE);
|
||||
if (eventsHandler != null) {
|
||||
eventsHandler.onCameraError("Camera failed to stop (timeout).");
|
||||
}
|
||||
}
|
||||
}, STOP_TIMEOUT_RUNNABLE_TOKEN, SystemClock.uptimeMillis() + STOP_TIMEOUT);
|
||||
|
||||
cameraDevice.close();
|
||||
cameraDevice = null;
|
||||
} else {
|
||||
Logging.w(TAG, "closeAndRelease called while cameraDevice is null");
|
||||
setCameraState(CameraState.IDLE);
|
||||
}
|
||||
this.cameraStateCallback = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the camera state while ensuring constraints are followed.
|
||||
*/
|
||||
private void setCameraState(CameraState newState) {
|
||||
// State must only be modified on the camera thread. It can be edited from other threads
|
||||
// if cameraState is IDLE since there is no camera thread.
|
||||
checkIsOnCameraThread();
|
||||
|
||||
if (newState != CameraState.IDLE) {
|
||||
if (cameraThreadHandler == null) {
|
||||
throw new IllegalStateException(
|
||||
"cameraThreadHandler must be null if and only if CameraState is IDLE.");
|
||||
}
|
||||
} else {
|
||||
cameraThreadHandler = null;
|
||||
}
|
||||
|
||||
switch (newState) {
|
||||
case STARTING:
|
||||
if (cameraState != CameraState.IDLE) {
|
||||
throw new IllegalStateException("Only stopped camera can start.");
|
||||
}
|
||||
break;
|
||||
case RUNNING:
|
||||
if (cameraState != CameraState.STARTING) {
|
||||
throw new IllegalStateException("Only starting camera can go to running state.");
|
||||
}
|
||||
break;
|
||||
case STOPPING:
|
||||
if (cameraState != CameraState.STARTING && cameraState != CameraState.RUNNING) {
|
||||
throw new IllegalStateException("Only starting or running camera can stop.");
|
||||
}
|
||||
break;
|
||||
case IDLE:
|
||||
if (cameraState != CameraState.STOPPING) {
|
||||
throw new IllegalStateException("Only stopping camera can go to idle state.");
|
||||
}
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException("Unknown camera state: " + newState);
|
||||
}
|
||||
|
||||
synchronized (cameraStateLock) {
|
||||
cameraState = newState;
|
||||
cameraStateLock.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Internal method for opening the camera. Must be called on the camera thread.
|
||||
*/
|
||||
private void openCamera() {
|
||||
try {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
|
||||
if (cameraState != CameraState.STARTING) {
|
||||
throw new IllegalStateException("Camera should be in state STARTING in openCamera.");
|
||||
}
|
||||
|
||||
if (cameraThreadHandler == null) {
|
||||
throw new RuntimeException("Someone set cameraThreadHandler to null while the camera "
|
||||
+ "state was STARTING. This should never happen");
|
||||
}
|
||||
|
||||
// Camera is in state STARTING so cameraName will not be edited.
|
||||
cameraManager.openCamera(cameraName, cameraStateCallback, cameraThreadHandler);
|
||||
} catch (CameraAccessException e) {
|
||||
reportError("Failed to open camera: " + e);
|
||||
}
|
||||
}
|
||||
|
||||
private void startCaptureOnCameraThread(
|
||||
final int requestedWidth, final int requestedHeight, final int requestedFramerate,
|
||||
final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
|
||||
final CapturerObserver capturerObserver) {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
|
||||
firstFrameReported = false;
|
||||
consecutiveCameraOpenFailures = 0;
|
||||
|
||||
this.applicationContext = applicationContext;
|
||||
this.capturerObserver = capturerObserver;
|
||||
this.surfaceTextureHelper = surfaceTextureHelper;
|
||||
this.cameraStateCallback = new CameraStateCallback();
|
||||
|
||||
synchronized (cameraStateLock) {
|
||||
// Remember the requested format in case we want to switch cameras.
|
||||
this.requestedWidth = requestedWidth;
|
||||
this.requestedHeight = requestedHeight;
|
||||
this.requestedFramerate = requestedFramerate;
|
||||
}
|
||||
|
||||
final CameraCharacteristics cameraCharacteristics;
|
||||
try {
|
||||
// Camera is in state STARTING so cameraName will not be edited.
|
||||
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraName);
|
||||
} catch (CameraAccessException e) {
|
||||
reportError("getCameraCharacteristics(): " + e.getMessage());
|
||||
return;
|
||||
}
|
||||
|
||||
List<CaptureFormat.FramerateRange> framerateRanges =
|
||||
Camera2Enumerator.getSupportedFramerateRanges(cameraCharacteristics);
|
||||
List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
|
||||
|
||||
if (framerateRanges.isEmpty() || sizes.isEmpty()) {
|
||||
reportError("No supported capture formats.");
|
||||
}
|
||||
|
||||
// Some LEGACY camera implementations use fps rates that are multiplied with 1000. Make sure
|
||||
// all values are multiplied with 1000 for consistency.
|
||||
this.fpsUnitFactor = (framerateRanges.get(0).max > 1000) ? 1 : 1000;
|
||||
|
||||
final CaptureFormat.FramerateRange bestFpsRange =
|
||||
CameraEnumerationAndroid.getClosestSupportedFramerateRange(
|
||||
framerateRanges, requestedFramerate);
|
||||
|
||||
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(
|
||||
sizes, requestedWidth, requestedHeight);
|
||||
|
||||
this.captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
|
||||
Logging.d(TAG, "Using capture format: " + captureFormat);
|
||||
|
||||
Logging.d(TAG, "Opening camera " + cameraName);
|
||||
if (eventsHandler != null) {
|
||||
int cameraIndex = -1;
|
||||
try {
|
||||
cameraIndex = Integer.parseInt(cameraName);
|
||||
} catch (NumberFormatException e) {
|
||||
Logging.d(TAG, "External camera with non-int identifier: " + cameraName);
|
||||
}
|
||||
eventsHandler.onCameraOpening(cameraIndex);
|
||||
}
|
||||
|
||||
openCamera();
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts capture using specified settings. This is automatically called for you by
|
||||
* VideoCapturerTrackSource if you are just using the camera as source for video track.
|
||||
*/
|
||||
@Override
|
||||
public void startCapture(
|
||||
final int requestedWidth, final int requestedHeight, final int requestedFramerate,
|
||||
final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
|
||||
final CapturerObserver capturerObserver) {
|
||||
Logging.d(TAG, "startCapture requested: " + requestedWidth + "x" + requestedHeight
|
||||
+ "@" + requestedFramerate);
|
||||
if (surfaceTextureHelper == null) {
|
||||
throw new IllegalArgumentException("surfaceTextureHelper not set.");
|
||||
}
|
||||
if (applicationContext == null) {
|
||||
throw new IllegalArgumentException("applicationContext not set.");
|
||||
}
|
||||
if (capturerObserver == null) {
|
||||
throw new IllegalArgumentException("capturerObserver not set.");
|
||||
}
|
||||
|
||||
synchronized (cameraStateLock) {
|
||||
waitForCameraToStopIfStopping();
|
||||
if (cameraState != CameraState.IDLE) {
|
||||
Logging.e(TAG, "Unexpected camera state for startCapture: " + cameraState);
|
||||
return;
|
||||
}
|
||||
this.cameraThreadHandler = surfaceTextureHelper.getHandler();
|
||||
setCameraState(CameraState.STARTING);
|
||||
}
|
||||
|
||||
postOnCameraThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate,
|
||||
surfaceTextureHelper, applicationContext, capturerObserver);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
final class CameraStateCallback extends CameraDevice.StateCallback {
|
||||
private String getErrorDescription(int errorCode) {
|
||||
switch (errorCode) {
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
|
||||
return "Camera device has encountered a fatal error.";
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
|
||||
return "Camera device could not be opened due to a device policy.";
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
|
||||
return "Camera device is in use already.";
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
|
||||
return "Camera service has encountered a fatal error.";
|
||||
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
|
||||
return "Camera device could not be opened because"
|
||||
+ " there are too many other open camera devices.";
|
||||
default:
|
||||
return "Unknown camera error: " + errorCode;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDisconnected(CameraDevice camera) {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
cameraDevice = camera;
|
||||
reportError("Camera disconnected.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(CameraDevice camera, int errorCode) {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
cameraDevice = camera;
|
||||
|
||||
if (cameraState == CameraState.STARTING && (
|
||||
errorCode == CameraDevice.StateCallback.ERROR_CAMERA_IN_USE ||
|
||||
errorCode == CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE)) {
|
||||
consecutiveCameraOpenFailures++;
|
||||
|
||||
if (consecutiveCameraOpenFailures < MAX_OPEN_CAMERA_ATTEMPTS) {
|
||||
Logging.w(TAG, "Opening camera failed, trying again: " + getErrorDescription(errorCode));
|
||||
|
||||
postDelayedOnCameraThread(OPEN_CAMERA_DELAY_MS, new Runnable() {
|
||||
public void run() {
|
||||
openCamera();
|
||||
}
|
||||
});
|
||||
return;
|
||||
} else {
|
||||
Logging.e(TAG, "Opening camera failed too many times. Passing the error.");
|
||||
}
|
||||
}
|
||||
|
||||
reportError(getErrorDescription(errorCode));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onOpened(CameraDevice camera) {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
|
||||
Logging.d(TAG, "Camera opened.");
|
||||
if (cameraState != CameraState.STARTING) {
|
||||
throw new IllegalStateException("Unexpected state when camera opened: " + cameraState);
|
||||
}
|
||||
|
||||
cameraDevice = camera;
|
||||
final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
|
||||
surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
|
||||
surface = new Surface(surfaceTexture);
|
||||
try {
|
||||
camera.createCaptureSession(
|
||||
Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
|
||||
} catch (CameraAccessException e) {
|
||||
reportError("Failed to create capture session. " + e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onClosed(CameraDevice camera) {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
|
||||
Logging.d(TAG, "Camera device closed.");
|
||||
|
||||
if (cameraState != CameraState.STOPPING) {
|
||||
Logging.e(TAG, "Camera state was not STOPPING in onClosed. Most likely camera didn't stop "
|
||||
+ "within timelimit and this method was invoked twice.");
|
||||
return;
|
||||
}
|
||||
|
||||
cameraThreadHandler.removeCallbacksAndMessages(STOP_TIMEOUT_RUNNABLE_TOKEN);
|
||||
setCameraState(CameraState.IDLE);
|
||||
if (eventsHandler != null) {
|
||||
eventsHandler.onCameraClosed();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
|
||||
@Override
|
||||
public void onConfigureFailed(CameraCaptureSession session) {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
captureSession = session;
|
||||
reportError("Failed to configure capture session.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onConfigured(CameraCaptureSession session) {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
Logging.d(TAG, "Camera capture session configured.");
|
||||
captureSession = session;
|
||||
try {
|
||||
/*
|
||||
* The viable options for video capture requests are:
|
||||
* TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
|
||||
* post-processing.
|
||||
* TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
|
||||
* quality.
|
||||
*/
|
||||
final CaptureRequest.Builder captureRequestBuilder =
|
||||
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
|
||||
// Set auto exposure fps range.
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(
|
||||
captureFormat.framerate.min / fpsUnitFactor,
|
||||
captureFormat.framerate.max / fpsUnitFactor));
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
|
||||
CaptureRequest.CONTROL_AE_MODE_ON);
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
|
||||
|
||||
captureRequestBuilder.addTarget(surface);
|
||||
session.setRepeatingRequest(
|
||||
captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
|
||||
} catch (CameraAccessException e) {
|
||||
reportError("Failed to start capture request. " + e);
|
||||
return;
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Camera device successfully started.");
|
||||
surfaceTextureHelper.startListening(Camera2Capturer.this);
|
||||
capturerObserver.onCapturerStarted(true /* success */);
|
||||
cameraStatistics = new CameraStatistics(surfaceTextureHelper, eventsHandler);
|
||||
setCameraState(CameraState.RUNNING);
|
||||
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchDone(isFrontCamera);
|
||||
switchEventsHandler = null;
|
||||
pendingCameraSwitchSemaphore.release();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
|
||||
static final int MAX_CONSECUTIVE_CAMERA_CAPTURE_FAILURES = 10;
|
||||
int consecutiveCameraCaptureFailures;
|
||||
|
||||
@Override
|
||||
public void onCaptureFailed(
|
||||
CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
|
||||
checkIsOnCameraThread();
|
||||
++consecutiveCameraCaptureFailures;
|
||||
if (consecutiveCameraCaptureFailures > MAX_CONSECUTIVE_CAMERA_CAPTURE_FAILURES) {
|
||||
reportError("Capture failed " + consecutiveCameraCaptureFailures + " consecutive times.");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCaptureCompleted(
|
||||
CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
|
||||
// TODO(sakal): This sometimes gets called after camera has stopped, investigate
|
||||
checkIsOnCameraThread();
|
||||
consecutiveCameraCaptureFailures = 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Switch camera to the next valid camera id. This can only be called while
|
||||
// the camera is running.
|
||||
@Override
|
||||
public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
|
||||
final String[] cameraIds;
|
||||
try {
|
||||
cameraIds = cameraManager.getCameraIdList();
|
||||
} catch (CameraAccessException e) {
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError("Could not get camera names: " + e);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (cameraIds.length < 2) {
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError("No camera to switch to.");
|
||||
}
|
||||
return;
|
||||
}
|
||||
// Do not handle multiple camera switch request to avoid blocking camera thread by handling too
|
||||
// many switch request from a queue. We have to be careful to always release this.
|
||||
if (!pendingCameraSwitchSemaphore.tryAcquire()) {
|
||||
Logging.w(TAG, "Ignoring camera switch request.");
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError("Pending camera switch already in progress.");
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
final String newCameraId;
|
||||
final SurfaceTextureHelper surfaceTextureHelper;
|
||||
final Context applicationContext;
|
||||
final CapturerObserver capturerObserver;
|
||||
final int requestedWidth;
|
||||
final int requestedHeight;
|
||||
final int requestedFramerate;
|
||||
|
||||
synchronized (cameraStateLock) {
|
||||
waitForCameraToStartIfStarting();
|
||||
|
||||
if (cameraState != CameraState.RUNNING) {
|
||||
Logging.e(TAG, "Calling swithCamera() on stopped camera.");
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError("Camera is stopped.");
|
||||
}
|
||||
pendingCameraSwitchSemaphore.release();
|
||||
return;
|
||||
}
|
||||
|
||||
// Calculate new camera index and camera id. Camera is in state RUNNING so cameraName will
|
||||
// not be edited.
|
||||
final int currentCameraIndex = Arrays.asList(cameraIds).indexOf(cameraName);
|
||||
if (currentCameraIndex == -1) {
|
||||
Logging.e(TAG, "Couldn't find current camera id " + cameraName
|
||||
+ " in list of camera ids: " + Arrays.toString(cameraIds));
|
||||
}
|
||||
final int newCameraIndex = (currentCameraIndex + 1) % cameraIds.length;
|
||||
newCameraId = cameraIds[newCameraIndex];
|
||||
|
||||
// Remember parameters. These are not null since camera is in RUNNING state. They aren't
|
||||
// edited either while camera is in RUNNING state.
|
||||
surfaceTextureHelper = this.surfaceTextureHelper;
|
||||
applicationContext = this.applicationContext;
|
||||
capturerObserver = this.capturerObserver;
|
||||
requestedWidth = this.requestedWidth;
|
||||
requestedHeight = this.requestedHeight;
|
||||
requestedFramerate = this.requestedFramerate;
|
||||
this.switchEventsHandler = switchEventsHandler;
|
||||
}
|
||||
|
||||
// Make the switch.
|
||||
stopCapture();
|
||||
setCameraName(newCameraId);
|
||||
startCapture(requestedWidth, requestedHeight, requestedFramerate, surfaceTextureHelper,
|
||||
applicationContext, capturerObserver);
|
||||
|
||||
// Note: switchEventsHandler will be called from onConfigured / reportError.
|
||||
}
|
||||
|
||||
// Requests a new output format from the video capturer. Captured frames
|
||||
// by the camera will be scaled/or dropped by the video capturer.
|
||||
// It does not matter if width and height are flipped. I.E, |width| = 640, |height| = 480 produce
|
||||
// the same result as |width| = 480, |height| = 640.
|
||||
// TODO(magjed/perkj): Document what this function does. Change name?
|
||||
@Override
|
||||
public void onOutputFormatRequest(final int width, final int height, final int framerate) {
|
||||
postOnCameraThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (capturerObserver == null) {
|
||||
Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera.");
|
||||
return;
|
||||
}
|
||||
Logging.d(TAG,
|
||||
"onOutputFormatRequestOnCameraThread: " + width + "x" + height + "@" + framerate);
|
||||
capturerObserver.onOutputFormatRequest(width, height, framerate);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Reconfigure the camera to capture in a new format. This should only be called while the camera
|
||||
// is running.
|
||||
@Override
|
||||
public void changeCaptureFormat(final int width, final int height, final int framerate) {
|
||||
final SurfaceTextureHelper surfaceTextureHelper;
|
||||
final Context applicationContext;
|
||||
final CapturerObserver capturerObserver;
|
||||
|
||||
synchronized (cameraStateLock) {
|
||||
waitForCameraToStartIfStarting();
|
||||
|
||||
if (cameraState != CameraState.RUNNING) {
|
||||
Logging.e(TAG, "Calling changeCaptureFormat() on stopped camera.");
|
||||
return;
|
||||
}
|
||||
|
||||
requestedWidth = width;
|
||||
requestedHeight = height;
|
||||
requestedFramerate = framerate;
|
||||
|
||||
surfaceTextureHelper = this.surfaceTextureHelper;
|
||||
applicationContext = this.applicationContext;
|
||||
capturerObserver = this.capturerObserver;
|
||||
}
|
||||
|
||||
// Make the switch.
|
||||
stopCapture();
|
||||
// TODO(magjed/sakal): Just recreate session.
|
||||
startCapture(width, height, framerate,
|
||||
surfaceTextureHelper, applicationContext, capturerObserver);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<CaptureFormat> getSupportedFormats() {
|
||||
synchronized (cameraState) {
|
||||
return Camera2Enumerator.getSupportedFormats(this.cameraManager, cameraName);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose() {
|
||||
synchronized (cameraStateLock) {
|
||||
waitForCameraToStopIfStopping();
|
||||
|
||||
if (cameraState != CameraState.IDLE) {
|
||||
throw new IllegalStateException("Unexpected camera state for dispose: " + cameraState);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Blocks until camera is known to be stopped.
|
||||
@Override
|
||||
public void stopCapture() {
|
||||
final CountDownLatch cameraStoppingLatch = new CountDownLatch(1);
|
||||
|
||||
Logging.d(TAG, "stopCapture");
|
||||
checkNotOnCameraThread();
|
||||
|
||||
synchronized (cameraStateLock) {
|
||||
waitForCameraToStartIfStarting();
|
||||
|
||||
if (cameraState != CameraState.RUNNING) {
|
||||
Logging.w(TAG, "stopCapture called for already stopped camera.");
|
||||
return;
|
||||
}
|
||||
|
||||
postOnCameraThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
Logging.d(TAG, "stopCaptureOnCameraThread");
|
||||
|
||||
// Stop capture.
|
||||
closeAndRelease();
|
||||
cameraStoppingLatch.countDown();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Wait for the stopping to start
|
||||
ThreadUtils.awaitUninterruptibly(cameraStoppingLatch);
|
||||
|
||||
Logging.d(TAG, "stopCapture done");
|
||||
}
|
||||
|
||||
private void postOnCameraThread(Runnable runnable) {
|
||||
postDelayedOnCameraThread(0 /* delayMs */, runnable);
|
||||
}
|
||||
|
||||
private void postDelayedOnCameraThread(int delayMs, Runnable runnable) {
|
||||
synchronized (cameraStateLock) {
|
||||
if ((cameraState != CameraState.STARTING && cameraState != CameraState.RUNNING)
|
||||
|| !cameraThreadHandler.postAtTime(
|
||||
runnable, this /* token */, SystemClock.uptimeMillis() + delayMs)) {
|
||||
Logging.w(TAG, "Runnable not scheduled even though it was requested.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private int getDeviceOrientation() {
|
||||
int orientation = 0;
|
||||
|
||||
WindowManager wm = (WindowManager) applicationContext.getSystemService(
|
||||
Context.WINDOW_SERVICE);
|
||||
switch(wm.getDefaultDisplay().getRotation()) {
|
||||
case Surface.ROTATION_90:
|
||||
orientation = 90;
|
||||
break;
|
||||
case Surface.ROTATION_180:
|
||||
orientation = 180;
|
||||
break;
|
||||
case Surface.ROTATION_270:
|
||||
orientation = 270;
|
||||
break;
|
||||
case Surface.ROTATION_0:
|
||||
default:
|
||||
orientation = 0;
|
||||
break;
|
||||
}
|
||||
return orientation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onTextureFrameAvailable(
|
||||
int oesTextureId, float[] transformMatrix, long timestampNs) {
|
||||
checkIsStrictlyOnCameraThread();
|
||||
|
||||
if (eventsHandler != null && !firstFrameReported) {
|
||||
eventsHandler.onFirstFrameAvailable();
|
||||
firstFrameReported = true;
|
||||
}
|
||||
|
||||
int rotation;
|
||||
if (isFrontCamera) {
|
||||
// Undo the mirror that the OS "helps" us with.
|
||||
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
|
||||
rotation = cameraOrientation + getDeviceOrientation();
|
||||
transformMatrix =
|
||||
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
|
||||
} else {
|
||||
rotation = cameraOrientation - getDeviceOrientation();
|
||||
}
|
||||
// Make sure |rotation| is between 0 and 360.
|
||||
rotation = (360 + rotation % 360) % 360;
|
||||
|
||||
// Undo camera orientation - we report it as rotation instead.
|
||||
transformMatrix = RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
|
||||
|
||||
cameraStatistics.addFrame();
|
||||
capturerObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
|
||||
transformMatrix, rotation, timestampNs);
|
||||
}
|
||||
}
|
||||
@ -1,208 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.content.Context;
|
||||
import android.graphics.ImageFormat;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.hardware.camera2.CameraAccessException;
|
||||
import android.hardware.camera2.CameraCharacteristics;
|
||||
import android.hardware.camera2.CameraManager;
|
||||
import android.hardware.camera2.CameraMetadata;
|
||||
import android.hardware.camera2.params.StreamConfigurationMap;
|
||||
import android.os.Build;
|
||||
import android.os.SystemClock;
|
||||
import android.util.Range;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@TargetApi(21)
|
||||
public class Camera2Enumerator implements CameraEnumerator {
|
||||
private final static String TAG = "Camera2Enumerator";
|
||||
private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
|
||||
|
||||
// Each entry contains the supported formats for a given camera index. The formats are enumerated
|
||||
// lazily in getSupportedFormats(), and cached for future reference.
|
||||
private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
|
||||
new HashMap<String, List<CaptureFormat>>();
|
||||
|
||||
final Context context;
|
||||
final CameraManager cameraManager;
|
||||
|
||||
public Camera2Enumerator(Context context) {
|
||||
this.context = context;
|
||||
this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] getDeviceNames() {
|
||||
try {
|
||||
return cameraManager.getCameraIdList();
|
||||
} catch (CameraAccessException e) {
|
||||
Logging.e(TAG, "Camera access exception: " + e);
|
||||
return new String[] {};
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isFrontFacing(String deviceName) {
|
||||
CameraCharacteristics characteristics
|
||||
= getCameraCharacteristics(deviceName);
|
||||
|
||||
return characteristics != null
|
||||
&& characteristics.get(CameraCharacteristics.LENS_FACING)
|
||||
== CameraMetadata.LENS_FACING_FRONT;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isBackFacing(String deviceName) {
|
||||
CameraCharacteristics characteristics
|
||||
= getCameraCharacteristics(deviceName);
|
||||
|
||||
return characteristics != null
|
||||
&& characteristics.get(CameraCharacteristics.LENS_FACING)
|
||||
== CameraMetadata.LENS_FACING_BACK;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CameraVideoCapturer createCapturer(String deviceName,
|
||||
CameraVideoCapturer.CameraEventsHandler eventsHandler) {
|
||||
return new Camera2Capturer(context, deviceName, eventsHandler);
|
||||
}
|
||||
|
||||
private CameraCharacteristics getCameraCharacteristics(String deviceName) {
|
||||
try {
|
||||
return cameraManager.getCameraCharacteristics(deviceName);
|
||||
} catch (CameraAccessException e) {
|
||||
Logging.e(TAG, "Camera access exception: " + e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public static boolean isSupported() {
|
||||
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
|
||||
}
|
||||
|
||||
static List<CaptureFormat.FramerateRange> getSupportedFramerateRanges(
|
||||
CameraCharacteristics cameraCharacteristics) {
|
||||
final Range<Integer>[] fpsRanges =
|
||||
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
|
||||
|
||||
if (fpsRanges == null) {
|
||||
return new ArrayList<CaptureFormat.FramerateRange>();
|
||||
}
|
||||
|
||||
int maxFps = 0;
|
||||
for (Range<Integer> fpsRange : fpsRanges) {
|
||||
maxFps = Math.max(maxFps, fpsRange.getUpper());
|
||||
}
|
||||
int unitFactor = maxFps < 1000 ? 1000 : 1;
|
||||
return convertFramerates(fpsRanges, unitFactor);
|
||||
}
|
||||
|
||||
static List<Size> getSupportedSizes(
|
||||
CameraCharacteristics cameraCharacteristics) {
|
||||
final StreamConfigurationMap streamMap =
|
||||
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
|
||||
final android.util.Size[] sizes = streamMap.getOutputSizes(SurfaceTexture.class);
|
||||
if (sizes == null) {
|
||||
Logging.e(TAG, "No supported camera output sizes.");
|
||||
return new ArrayList<Size>();
|
||||
}
|
||||
return convertSizes(sizes);
|
||||
}
|
||||
|
||||
static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
|
||||
return getSupportedFormats(
|
||||
(CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
|
||||
}
|
||||
|
||||
static List<CaptureFormat> getSupportedFormats(
|
||||
CameraManager cameraManager, String cameraId) {
|
||||
synchronized (cachedSupportedFormats) {
|
||||
if (cachedSupportedFormats.containsKey(cameraId)) {
|
||||
return cachedSupportedFormats.get(cameraId);
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
|
||||
final long startTimeMs = SystemClock.elapsedRealtime();
|
||||
|
||||
final CameraCharacteristics cameraCharacteristics;
|
||||
try {
|
||||
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
|
||||
} catch (Exception ex) {
|
||||
Logging.e(TAG, "getCameraCharacteristics(): " + ex);
|
||||
return new ArrayList<CaptureFormat>();
|
||||
}
|
||||
|
||||
final StreamConfigurationMap streamMap =
|
||||
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
|
||||
|
||||
List<CaptureFormat.FramerateRange> framerateRanges = getSupportedFramerateRanges(
|
||||
cameraCharacteristics);
|
||||
List<Size> sizes = getSupportedSizes(cameraCharacteristics);
|
||||
|
||||
int defaultMaxFps = 0;
|
||||
for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
|
||||
defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
|
||||
}
|
||||
|
||||
final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
|
||||
for (Size size : sizes) {
|
||||
long minFrameDurationNs = 0;
|
||||
try {
|
||||
minFrameDurationNs = streamMap.getOutputMinFrameDuration(SurfaceTexture.class,
|
||||
new android.util.Size(size.width, size.height));
|
||||
} catch (Exception e) {
|
||||
// getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
|
||||
}
|
||||
final int maxFps = (minFrameDurationNs == 0)
|
||||
? defaultMaxFps
|
||||
: (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
|
||||
formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
|
||||
Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
|
||||
}
|
||||
|
||||
cachedSupportedFormats.put(cameraId, formatList);
|
||||
final long endTimeMs = SystemClock.elapsedRealtime();
|
||||
Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
|
||||
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
|
||||
return formatList;
|
||||
}
|
||||
}
|
||||
|
||||
// Convert from android.util.Size to Size.
|
||||
private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
|
||||
final List<Size> sizes = new ArrayList<Size>();
|
||||
for (android.util.Size size : cameraSizes) {
|
||||
sizes.add(new Size(size.getWidth(), size.getHeight()));
|
||||
}
|
||||
return sizes;
|
||||
}
|
||||
|
||||
// Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
|
||||
private static List<CaptureFormat.FramerateRange> convertFramerates(
|
||||
Range<Integer>[] arrayRanges, int unitFactor) {
|
||||
final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
|
||||
for (Range<Integer> range : arrayRanges) {
|
||||
ranges.add(new CaptureFormat.FramerateRange(
|
||||
range.getLower() * unitFactor,
|
||||
range.getUpper() * unitFactor));
|
||||
}
|
||||
return ranges;
|
||||
}
|
||||
}
|
||||
@ -1,237 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static java.lang.Math.abs;
|
||||
|
||||
import android.graphics.ImageFormat;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public class CameraEnumerationAndroid {
|
||||
private final static String TAG = "CameraEnumerationAndroid";
|
||||
|
||||
public static class CaptureFormat {
|
||||
// Class to represent a framerate range. The framerate varies because of lightning conditions.
|
||||
// The values are multiplied by 1000, so 1000 represents one frame per second.
|
||||
public static class FramerateRange {
|
||||
public int min;
|
||||
public int max;
|
||||
|
||||
public FramerateRange(int min, int max) {
|
||||
this.min = min;
|
||||
this.max = max;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (!(other instanceof FramerateRange)) {
|
||||
return false;
|
||||
}
|
||||
final FramerateRange otherFramerate = (FramerateRange) other;
|
||||
return min == otherFramerate.min && max == otherFramerate.max;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
// Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
|
||||
return 1 + 65537 * min + max;
|
||||
}
|
||||
}
|
||||
|
||||
public final int width;
|
||||
public final int height;
|
||||
public final FramerateRange framerate;
|
||||
|
||||
// TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
|
||||
// needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
|
||||
// all imageFormats.
|
||||
public final int imageFormat = ImageFormat.NV21;
|
||||
|
||||
public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.framerate = new FramerateRange(minFramerate, maxFramerate);
|
||||
}
|
||||
|
||||
public CaptureFormat(int width, int height, FramerateRange framerate) {
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.framerate = framerate;
|
||||
}
|
||||
|
||||
// Calculates the frame size of this capture format.
|
||||
public int frameSize() {
|
||||
return frameSize(width, height, imageFormat);
|
||||
}
|
||||
|
||||
// Calculates the frame size of the specified image format. Currently only
|
||||
// supporting ImageFormat.NV21.
|
||||
// The size is width * height * number of bytes per pixel.
|
||||
// http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
|
||||
public static int frameSize(int width, int height, int imageFormat) {
|
||||
if (imageFormat != ImageFormat.NV21) {
|
||||
throw new UnsupportedOperationException("Don't know how to calculate "
|
||||
+ "the frame size of non-NV21 image formats.");
|
||||
}
|
||||
return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return width + "x" + height + "@" + framerate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (!(other instanceof CaptureFormat)) {
|
||||
return false;
|
||||
}
|
||||
final CaptureFormat otherFormat = (CaptureFormat) other;
|
||||
return width == otherFormat.width && height == otherFormat.height
|
||||
&& framerate.equals(otherFormat.framerate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
* Please use Camera1Enumerator.getDeviceNames() instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public static String[] getDeviceNames() {
|
||||
return new Camera1Enumerator().getDeviceNames();
|
||||
}
|

/**
* @deprecated
* Please use Camera1Enumerator.getDeviceNames().length instead.
*/
@Deprecated
public static int getDeviceCount() {
return new Camera1Enumerator().getDeviceNames().length;
}

/**
* @deprecated
* Please use Camera1Enumerator.getDeviceNames()[index] instead.
*/
@Deprecated
public static String getDeviceName(int index) {
return new Camera1Enumerator().getDeviceName(index);
}

/**
* @deprecated
* Please use Camera1Enumerator.isFrontFacing(String deviceName) instead.
*/
@Deprecated
public static String getNameOfFrontFacingDevice() {
return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
}

/**
* @deprecated
* Please use Camera1Enumerator.isBackFacing(String deviceName) instead.
*/
@Deprecated
public static String getNameOfBackFacingDevice() {
return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
}

// Helper class for finding the closest supported format for the two functions below. It creates a
// comparator based on the difference to some requested parameters, where the element with the
// minimum difference is the element that is closest to the requested parameters.
private static abstract class ClosestComparator<T> implements Comparator<T> {
// Difference between supported and requested parameter.
abstract int diff(T supportedParameter);

@Override
public int compare(T t1, T t2) {
return diff(t1) - diff(t2);
}
}

// Prefer an fps range with an upper bound close to |framerate|. Also prefer an fps range with a
// low lower bound, to allow the framerate to fluctuate based on lighting conditions.
public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
return Collections.min(supportedFramerates,
new ClosestComparator<CaptureFormat.FramerateRange>() {
// Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
// from requested.
private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;

// Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
private static final int MIN_FPS_THRESHOLD = 8000;
private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;

// Use one weight for small |value| less than |threshold|, and another weight above.
private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
return (value < threshold)
? value * lowWeight
: threshold * lowWeight + (value - threshold) * highWeight;
}

@Override
int diff(CaptureFormat.FramerateRange range) {
final int minFpsError = progressivePenalty(range.min,
MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
return minFpsError + maxFpsError;
}
});
}
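
// Illustrative usage sketch (hypothetical caller code): building FramerateRange candidates from
// android.hardware.Camera.Parameters and asking for 30 fps. The ranges are already in the
// 1/1000 fps units that the comparator above expects.
//
//   final List<CaptureFormat.FramerateRange> ranges =
//       new ArrayList<CaptureFormat.FramerateRange>();
//   for (int[] range : parameters.getSupportedPreviewFpsRange()) {
//     ranges.add(new CaptureFormat.FramerateRange(range[0], range[1]));
//   }
//   final CaptureFormat.FramerateRange best = getClosestSupportedFramerateRange(ranges, 30);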

public static Size getClosestSupportedSize(
List<Size> supportedSizes, final int requestedWidth,
final int requestedHeight) {
return Collections.min(supportedSizes,
new ClosestComparator<Size>() {
@Override
int diff(Size size) {
return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
}
});
}
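
// Illustrative usage sketch: picking the supported preview size nearest to a 1280x720 request.
// |sizes| is assumed to be a List<Size> gathered elsewhere from the camera parameters.
//
//   final Size closest = getClosestSupportedSize(sizes, 1280, 720);
//   Logging.d(TAG, "Closest supported size: " + closest.width + "x" + closest.height);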

private static String getNameOfDevice(int facing) {
final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
try {
android.hardware.Camera.getCameraInfo(i, info);
if (info.facing == facing) {
return getDeviceName(i);
}
} catch (Exception e) {
Logging.e(TAG, "getCameraInfo() failed on index " + i, e);
}
}
return null;
}
}
@@ -1,20 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

package org.webrtc;

public interface CameraEnumerator {
public String[] getDeviceNames();
public boolean isFrontFacing(String deviceName);
public boolean isBackFacing(String deviceName);

public CameraVideoCapturer createCapturer(String deviceName,
CameraVideoCapturer.CameraEventsHandler eventsHandler);
}
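
// Illustrative usage sketch (hypothetical app code, not part of this interface): enumerating
// devices with Camera1Enumerator and opening the first front-facing camera.
//
//   final CameraEnumerator enumerator = new Camera1Enumerator(true /* captureToTexture */);
//   for (String name : enumerator.getDeviceNames()) {
//     if (enumerator.isFrontFacing(name)) {
//       CameraVideoCapturer capturer = enumerator.createCapturer(name, null /* eventsHandler */);
//       break;
//     }
//   }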
|
||||
@@ -1,128 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/**
|
||||
* Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
|
||||
* switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
|
||||
* class for detecting camera freezes.
|
||||
*/
|
||||
public interface CameraVideoCapturer extends VideoCapturer {
|
||||
/**
|
||||
* Camera events handler - can be used to be notified about camera events. The callbacks are
|
||||
* executed from an arbitrary thread.
|
||||
*/
|
||||
public interface CameraEventsHandler {
|
||||
// Camera error handler - invoked when camera can not be opened
|
||||
// or any camera exception happens on camera thread.
|
||||
void onCameraError(String errorDescription);
|
||||
|
||||
// Invoked when camera stops receiving frames.
|
||||
void onCameraFreezed(String errorDescription);
|
||||
|
||||
// Callback invoked when camera is opening.
|
||||
void onCameraOpening(int cameraId);
|
||||
|
||||
// Callback invoked when first camera frame is available after camera is started.
|
||||
void onFirstFrameAvailable();
|
||||
|
||||
// Callback invoked when camera is closed.
|
||||
void onCameraClosed();
|
||||
}
|
||||
|
||||
/**
|
||||
* Camera switch handler - one of these functions is invoked with the result of switchCamera().
|
||||
* The callback may be called on an arbitrary thread.
|
||||
*/
|
||||
public interface CameraSwitchHandler {
|
||||
// Invoked on success. |isFrontCamera| is true if the new camera is front facing.
|
||||
void onCameraSwitchDone(boolean isFrontCamera);
|
||||
|
||||
// Invoked on failure, e.g. camera is stopped or only one camera available.
|
||||
void onCameraSwitchError(String errorDescription);
|
||||
}
|
||||
|
||||
/**
|
||||
* Switch camera to the next valid camera id. This can only be called while the camera is running.
|
||||
* This function can be called from any thread.
|
||||
*/
|
||||
void switchCamera(CameraSwitchHandler switchEventsHandler);
|
||||
|
||||
/**
|
||||
* Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
|
||||
* on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
|
||||
* thread.
|
||||
*/
|
||||
public static class CameraStatistics {
|
||||
private final static String TAG = "CameraStatistics";
|
||||
private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
|
||||
private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 4000;
|
||||
|
||||
private final SurfaceTextureHelper surfaceTextureHelper;
|
||||
private final CameraEventsHandler eventsHandler;
|
||||
private int frameCount;
|
||||
private int freezePeriodCount;
|
||||
// Camera observer - monitors camera framerate. Observer is executed on camera thread.
|
||||
private final Runnable cameraObserver = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
|
||||
Logging.d(TAG, "Camera fps: " + cameraFps +".");
|
||||
if (frameCount == 0) {
|
||||
++freezePeriodCount;
|
||||
if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS
|
||||
&& eventsHandler != null) {
|
||||
Logging.e(TAG, "Camera freezed.");
|
||||
if (surfaceTextureHelper.isTextureInUse()) {
|
||||
// This can only happen if we are capturing to textures.
|
||||
eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
|
||||
} else {
|
||||
eventsHandler.onCameraFreezed("Camera failure.");
|
||||
}
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
freezePeriodCount = 0;
|
||||
}
|
||||
frameCount = 0;
|
||||
surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
|
||||
}
|
||||
};
|
||||
|
||||
public CameraStatistics(
|
||||
SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
|
||||
if (surfaceTextureHelper == null) {
|
||||
throw new IllegalArgumentException("SurfaceTextureHelper is null");
|
||||
}
|
||||
this.surfaceTextureHelper = surfaceTextureHelper;
|
||||
this.eventsHandler = eventsHandler;
|
||||
this.frameCount = 0;
|
||||
this.freezePeriodCount = 0;
|
||||
surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
|
||||
}
|
||||
|
||||
private void checkThread() {
|
||||
if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
|
||||
throw new IllegalStateException("Wrong thread");
|
||||
}
|
||||
}
|
||||
|
||||
public void addFrame() {
|
||||
checkThread();
|
||||
++frameCount;
|
||||
}
|
||||
|
||||
public void release() {
|
||||
checkThread();
|
||||
surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
|
||||
}
|
||||
}
|
||||
}
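
// Illustrative usage sketch (hypothetical caller): switching cameras while capturing. The
// handler may be invoked on an arbitrary thread, as documented above.
//
//   capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
//     @Override
//     public void onCameraSwitchDone(boolean isFrontCamera) {
//       Logging.d("AppClient", "Switched to " + (isFrontCamera ? "front" : "back") + " camera.");
//     }
//
//     @Override
//     public void onCameraSwitchError(String errorDescription) {
//       Logging.e("AppClient", "Camera switch failed: " + errorDescription);
//     }
//   });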
|
||||
@@ -1,128 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.view.Surface;
|
||||
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
|
||||
|
||||
/**
|
||||
* Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
|
||||
* and an EGLSurface.
|
||||
*/
|
||||
public abstract class EglBase {
|
||||
// EGL wrapper for an actual EGLContext.
|
||||
public static class Context {
|
||||
}
|
||||
|
||||
// According to the documentation, EGL can be used from multiple threads at the same time if each
|
||||
// thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
|
||||
// Therefore, synchronize on this global lock before calling dangerous EGL functions that might
|
||||
// deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
|
||||
public static final Object lock = new Object();
|
||||
|
||||
// These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
|
||||
// https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
|
||||
// This is similar to how GlSurfaceView does:
|
||||
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
|
||||
private static final int EGL_OPENGL_ES2_BIT = 4;
|
||||
// Android-specific extension.
|
||||
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
|
||||
|
||||
public static final int[] CONFIG_PLAIN = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
EGL10.EGL_BLUE_SIZE, 8,
|
||||
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
public static final int[] CONFIG_RGBA = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
EGL10.EGL_BLUE_SIZE, 8,
|
||||
EGL10.EGL_ALPHA_SIZE, 8,
|
||||
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
public static final int[] CONFIG_PIXEL_BUFFER = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
EGL10.EGL_BLUE_SIZE, 8,
|
||||
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
|
||||
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
EGL10.EGL_BLUE_SIZE, 8,
|
||||
EGL10.EGL_ALPHA_SIZE, 8,
|
||||
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
|
||||
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
public static final int[] CONFIG_RECORDABLE = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
EGL10.EGL_BLUE_SIZE, 8,
|
||||
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
|
||||
EGL_RECORDABLE_ANDROID, 1,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
|
||||
// Create a new context with the specified config attributes, sharing data with sharedContext.
|
||||
// |sharedContext| can be null.
|
||||
public static EglBase create(Context sharedContext, int[] configAttributes) {
|
||||
return (EglBase14.isEGL14Supported()
|
||||
&& (sharedContext == null || sharedContext instanceof EglBase14.Context))
|
||||
? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
|
||||
: new EglBase10((EglBase10.Context) sharedContext, configAttributes);
|
||||
}
|
||||
|
||||
public static EglBase create() {
|
||||
return create(null, CONFIG_PLAIN);
|
||||
}
|
||||
|
||||
public static EglBase create(Context sharedContext) {
|
||||
return create(sharedContext, CONFIG_PLAIN);
|
||||
}
|
||||
|
||||
public abstract void createSurface(Surface surface);
|
||||
|
||||
// Create EGLSurface from the Android SurfaceTexture.
|
||||
public abstract void createSurface(SurfaceTexture surfaceTexture);
|
||||
|
||||
// Create dummy 1x1 pixel buffer surface so the context can be made current.
|
||||
public abstract void createDummyPbufferSurface();
|
||||
|
||||
public abstract void createPbufferSurface(int width, int height);
|
||||
|
||||
public abstract Context getEglBaseContext();
|
||||
|
||||
public abstract boolean hasSurface();
|
||||
|
||||
public abstract int surfaceWidth();
|
||||
|
||||
public abstract int surfaceHeight();
|
||||
|
||||
public abstract void releaseSurface();
|
||||
|
||||
public abstract void release();
|
||||
|
||||
public abstract void makeCurrent();
|
||||
|
||||
// Detach the current EGL context, so that it can be made current on another thread.
|
||||
public abstract void detachCurrent();
|
||||
|
||||
public abstract void swapBuffers();
|
||||
}
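
// Illustrative usage sketch (hypothetical caller): creating an off-screen context for GL work on
// a dedicated thread and tearing it down again. Error handling is omitted for brevity.
//
//   EglBase eglBase = EglBase.create();      // Plain RGB config, no shared context.
//   eglBase.createDummyPbufferSurface();     // 1x1 pbuffer so the context can be made current.
//   eglBase.makeCurrent();
//   // ... issue GLES20 calls here ...
//   eglBase.release();                       // Also detaches the context and frees the surface.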
|
||||
@@ -1,301 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Canvas;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.graphics.Rect;
|
||||
import android.view.Surface;
|
||||
import android.view.SurfaceHolder;
|
||||
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
import javax.microedition.khronos.egl.EGLConfig;
|
||||
import javax.microedition.khronos.egl.EGLContext;
|
||||
import javax.microedition.khronos.egl.EGLDisplay;
|
||||
import javax.microedition.khronos.egl.EGLSurface;
|
||||
|
||||
/**
|
||||
* Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
|
||||
* and an EGLSurface.
|
||||
*/
|
||||
final class EglBase10 extends EglBase {
|
||||
// This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
|
||||
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
|
||||
|
||||
private final EGL10 egl;
|
||||
private EGLContext eglContext;
|
||||
private EGLConfig eglConfig;
|
||||
private EGLDisplay eglDisplay;
|
||||
private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
|
||||
|
||||
// EGL wrapper for an actual EGLContext.
|
||||
public static class Context extends EglBase.Context {
|
||||
private final EGLContext eglContext;
|
||||
|
||||
public Context(EGLContext eglContext) {
|
||||
this.eglContext = eglContext;
|
||||
}
|
||||
}
|
||||
|
||||
// Create a new context with the specified config type, sharing data with sharedContext.
|
||||
EglBase10(Context sharedContext, int[] configAttributes) {
|
||||
this.egl = (EGL10) EGLContext.getEGL();
|
||||
eglDisplay = getEglDisplay();
|
||||
eglConfig = getEglConfig(eglDisplay, configAttributes);
|
||||
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createSurface(Surface surface) {
|
||||
/**
|
||||
* We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
|
||||
* couldn't actually take a Surface object until API 17. Older versions fortunately just call
|
||||
* SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
|
||||
*/
|
||||
class FakeSurfaceHolder implements SurfaceHolder {
|
||||
private final Surface surface;
|
||||
|
||||
FakeSurfaceHolder(Surface surface) {
|
||||
this.surface = surface;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addCallback(Callback callback) {}
|
||||
|
||||
@Override
|
||||
public void removeCallback(Callback callback) {}
|
||||
|
||||
@Override
|
||||
public boolean isCreating() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
@Override
|
||||
public void setType(int i) {}
|
||||
|
||||
@Override
|
||||
public void setFixedSize(int i, int i2) {}
|
||||
|
||||
@Override
|
||||
public void setSizeFromLayout() {}
|
||||
|
||||
@Override
|
||||
public void setFormat(int i) {}
|
||||
|
||||
@Override
|
||||
public void setKeepScreenOn(boolean b) {}
|
||||
|
||||
@Override
|
||||
public Canvas lockCanvas() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Canvas lockCanvas(Rect rect) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void unlockCanvasAndPost(Canvas canvas) {}
|
||||
|
||||
@Override
|
||||
public Rect getSurfaceFrame() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Surface getSurface() {
|
||||
return surface;
|
||||
}
|
||||
}
|
||||
|
||||
createSurfaceInternal(new FakeSurfaceHolder(surface));
|
||||
}
|
||||
|
||||
// Create EGLSurface from the Android SurfaceTexture.
|
||||
@Override
|
||||
public void createSurface(SurfaceTexture surfaceTexture) {
|
||||
createSurfaceInternal(surfaceTexture);
|
||||
}
|
||||
|
||||
// Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
|
||||
private void createSurfaceInternal(Object nativeWindow) {
|
||||
if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
|
||||
throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
|
||||
}
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL10.EGL_NONE};
|
||||
eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Failed to create window surface");
|
||||
}
|
||||
}
|
||||
|
||||
// Create dummy 1x1 pixel buffer surface so the context can be made current.
|
||||
@Override
|
||||
public void createDummyPbufferSurface() {
|
||||
createPbufferSurface(1, 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createPbufferSurface(int width, int height) {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
|
||||
eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create pixel buffer surface with size: " + width + "x" + height);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.webrtc.EglBase.Context getEglBaseContext() {
|
||||
return new EglBase10.Context(eglContext);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSurface() {
|
||||
return eglSurface != EGL10.EGL_NO_SURFACE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceWidth() {
|
||||
final int widthArray[] = new int[1];
|
||||
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
|
||||
return widthArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceHeight() {
|
||||
final int heightArray[] = new int[1];
|
||||
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
|
||||
return heightArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public void releaseSurface() {
|
||||
if (eglSurface != EGL10.EGL_NO_SURFACE) {
|
||||
egl.eglDestroySurface(eglDisplay, eglSurface);
|
||||
eglSurface = EGL10.EGL_NO_SURFACE;
|
||||
}
|
||||
}
|
||||
|
||||
private void checkIsNotReleased() {
|
||||
if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
|
||||
|| eglConfig == null) {
|
||||
throw new RuntimeException("This object has been released");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
checkIsNotReleased();
|
||||
releaseSurface();
|
||||
detachCurrent();
|
||||
egl.eglDestroyContext(eglDisplay, eglContext);
|
||||
egl.eglTerminate(eglDisplay);
|
||||
eglContext = EGL10.EGL_NO_CONTEXT;
|
||||
eglDisplay = EGL10.EGL_NO_DISPLAY;
|
||||
eglConfig = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void makeCurrent() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't make current");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
|
||||
throw new RuntimeException("eglMakeCurrent failed");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Detach the current EGL context, so that it can be made current on another thread.
|
||||
@Override
|
||||
public void detachCurrent() {
|
||||
synchronized (EglBase.lock) {
|
||||
if (!egl.eglMakeCurrent(
|
||||
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
|
||||
throw new RuntimeException("eglDetachCurrent failed");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void swapBuffers() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't swap buffers");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
egl.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
}
|
||||
|
||||
// Return an EGLDisplay, or die trying.
|
||||
private EGLDisplay getEglDisplay() {
|
||||
EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
|
||||
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
|
||||
throw new RuntimeException("Unable to get EGL10 display");
|
||||
}
|
||||
int[] version = new int[2];
|
||||
if (!egl.eglInitialize(eglDisplay, version)) {
|
||||
throw new RuntimeException("Unable to initialize EGL10");
|
||||
}
|
||||
return eglDisplay;
|
||||
}
|
||||
|
||||
// Return an EGLConfig, or die trying.
|
||||
private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
|
||||
EGLConfig[] configs = new EGLConfig[1];
|
||||
int[] numConfigs = new int[1];
|
||||
if (!egl.eglChooseConfig(
|
||||
eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
|
||||
throw new RuntimeException("eglChooseConfig failed");
|
||||
}
|
||||
if (numConfigs[0] <= 0) {
|
||||
throw new RuntimeException("Unable to find any matching EGL config");
|
||||
}
|
||||
final EGLConfig eglConfig = configs[0];
|
||||
if (eglConfig == null) {
|
||||
throw new RuntimeException("eglChooseConfig returned null");
|
||||
}
|
||||
return eglConfig;
|
||||
}
|
||||
|
||||
// Return an EGLConfig, or die trying.
|
||||
private EGLContext createEglContext(
|
||||
Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
|
||||
if (sharedContext != null && sharedContext.eglContext == EGL10.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException("Invalid sharedContext");
|
||||
}
|
||||
int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
|
||||
EGLContext rootContext =
|
||||
sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext.eglContext;
|
||||
final EGLContext eglContext;
|
||||
synchronized (EglBase.lock) {
|
||||
eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
|
||||
}
|
||||
if (eglContext == EGL10.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException("Failed to create EGL context");
|
||||
}
|
||||
return eglContext;
|
||||
}
|
||||
}
|
||||
@@ -1,258 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.EGL14;
|
||||
import android.opengl.EGLConfig;
|
||||
import android.opengl.EGLContext;
|
||||
import android.opengl.EGLDisplay;
|
||||
import android.opengl.EGLExt;
|
||||
import android.opengl.EGLSurface;
|
||||
import android.view.Surface;
|
||||
|
||||
import org.webrtc.Logging;
|
||||
|
||||
/**
|
||||
* Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
|
||||
* and an EGLSurface.
|
||||
*/
|
||||
@TargetApi(18)
|
||||
public final class EglBase14 extends EglBase {
|
||||
private static final String TAG = "EglBase14";
|
||||
private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
|
||||
private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
|
||||
private EGLContext eglContext;
|
||||
private EGLConfig eglConfig;
|
||||
private EGLDisplay eglDisplay;
|
||||
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
|
||||
// EGL 1.4 is supported from API 17. But EGLExt, which is used for setting the presentation
// timestamp on a surface, is supported from 18, so we require 18.
|
||||
public static boolean isEGL14Supported() {
|
||||
Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
|
||||
+ ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
|
||||
return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
|
||||
}
|
||||
|
||||
public static class Context extends EglBase.Context {
|
||||
private final android.opengl.EGLContext egl14Context;
|
||||
|
||||
public Context(android.opengl.EGLContext eglContext) {
|
||||
this.egl14Context = eglContext;
|
||||
}
|
||||
}
|
||||
|
||||
// Create a new context with the specified config type, sharing data with sharedContext.
|
||||
// |sharedContext| may be null.
|
||||
public EglBase14(EglBase14.Context sharedContext, int[] configAttributes) {
|
||||
eglDisplay = getEglDisplay();
|
||||
eglConfig = getEglConfig(eglDisplay, configAttributes);
|
||||
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
|
||||
}
|
||||
|
||||
// Create EGLSurface from the Android Surface.
|
||||
@Override
|
||||
public void createSurface(Surface surface) {
|
||||
createSurfaceInternal(surface);
|
||||
}
|
||||
|
||||
// Create EGLSurface from the Android SurfaceTexture.
|
||||
@Override
|
||||
public void createSurface(SurfaceTexture surfaceTexture) {
|
||||
createSurfaceInternal(surfaceTexture);
|
||||
}
|
||||
|
||||
// Create EGLSurface from either Surface or SurfaceTexture.
|
||||
private void createSurfaceInternal(Object surface) {
|
||||
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
|
||||
throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
|
||||
}
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL14.EGL_NONE};
|
||||
eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Failed to create window surface");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createDummyPbufferSurface() {
|
||||
createPbufferSurface(1, 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createPbufferSurface(int width, int height) {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
|
||||
eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create pixel buffer surface with size: " + width + "x" + height);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Context getEglBaseContext() {
|
||||
return new EglBase14.Context(eglContext);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSurface() {
|
||||
return eglSurface != EGL14.EGL_NO_SURFACE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceWidth() {
|
||||
final int widthArray[] = new int[1];
|
||||
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
|
||||
return widthArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceHeight() {
|
||||
final int heightArray[] = new int[1];
|
||||
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
|
||||
return heightArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public void releaseSurface() {
|
||||
if (eglSurface != EGL14.EGL_NO_SURFACE) {
|
||||
EGL14.eglDestroySurface(eglDisplay, eglSurface);
|
||||
eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
}
|
||||
}
|
||||
|
||||
private void checkIsNotReleased() {
|
||||
if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
|
||||
|| eglConfig == null) {
|
||||
throw new RuntimeException("This object has been released");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
checkIsNotReleased();
|
||||
releaseSurface();
|
||||
detachCurrent();
|
||||
EGL14.eglDestroyContext(eglDisplay, eglContext);
|
||||
EGL14.eglReleaseThread();
|
||||
EGL14.eglTerminate(eglDisplay);
|
||||
eglContext = EGL14.EGL_NO_CONTEXT;
|
||||
eglDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
eglConfig = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void makeCurrent() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't make current");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
|
||||
throw new RuntimeException("eglMakeCurrent failed");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Detach the current EGL context, so that it can be made current on another thread.
|
||||
@Override
|
||||
public void detachCurrent() {
|
||||
synchronized (EglBase.lock) {
|
||||
if (!EGL14.eglMakeCurrent(
|
||||
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
|
||||
throw new RuntimeException("eglDetachCurrent failed");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void swapBuffers() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't swap buffers");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
}
|
||||
|
||||
public void swapBuffers(long timeStampNs) {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't swap buffers");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
// See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
|
||||
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
|
||||
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
}
|
||||
|
||||
// Return an EGLDisplay, or die trying.
|
||||
private static EGLDisplay getEglDisplay() {
|
||||
EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
|
||||
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
|
||||
throw new RuntimeException("Unable to get EGL14 display");
|
||||
}
|
||||
int[] version = new int[2];
|
||||
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
|
||||
throw new RuntimeException("Unable to initialize EGL14");
|
||||
}
|
||||
return eglDisplay;
|
||||
}
|
||||
|
||||
// Return an EGLConfig, or die trying.
|
||||
private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
|
||||
EGLConfig[] configs = new EGLConfig[1];
|
||||
int[] numConfigs = new int[1];
|
||||
if (!EGL14.eglChooseConfig(
|
||||
eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
|
||||
throw new RuntimeException("eglChooseConfig failed");
|
||||
}
|
||||
if (numConfigs[0] <= 0) {
|
||||
throw new RuntimeException("Unable to find any matching EGL config");
|
||||
}
|
||||
final EGLConfig eglConfig = configs[0];
|
||||
if (eglConfig == null) {
|
||||
throw new RuntimeException("eglChooseConfig returned null");
|
||||
}
|
||||
return eglConfig;
|
||||
}
|
||||
|
||||
// Return an EGLConfig, or die trying.
|
||||
private static EGLContext createEglContext(
|
||||
EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
|
||||
if (sharedContext != null && sharedContext.egl14Context == EGL14.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException("Invalid sharedContext");
|
||||
}
|
||||
int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
|
||||
EGLContext rootContext =
|
||||
sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext.egl14Context;
|
||||
final EGLContext eglContext;
|
||||
synchronized (EglBase.lock) {
|
||||
eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
|
||||
}
|
||||
if (eglContext == EGL14.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException("Failed to create EGL context");
|
||||
}
|
||||
return eglContext;
|
||||
}
|
||||
}
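
// Illustrative sketch (hypothetical encoder path): the timestamped swapBuffers() overload above
// forwards the frame's presentation time to EGLExt, which matters when the surface feeds a
// MediaCodec input surface. |captureTimeNs|, |drawer|, and the texture arguments are assumed
// caller-provided values.
//
//   eglBase14.makeCurrent();
//   drawer.drawOes(oesTextureId, texMatrix, width, height, 0, 0, width, height);
//   eglBase14.swapBuffers(captureTimeNs);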
|
||||
@@ -1,216 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
|
||||
import org.webrtc.GlShader;
|
||||
import org.webrtc.GlUtil;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.FloatBuffer;
|
||||
import java.util.Arrays;
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Helper class to draw an opaque quad on the target viewport location. Rotation, mirror, and
|
||||
* cropping are specified using a 4x4 texture coordinate transform matrix. The frame input can either
|
||||
* be an OES texture or YUV textures in I420 format. The GL state must be preserved between draw
|
||||
* calls; this is intentional, to maximize performance. The function release() must be called
|
||||
* manually to free the resources held by this object.
|
||||
*/
|
||||
public class GlRectDrawer implements RendererCommon.GlDrawer {
|
||||
// Simple vertex shader, used for both YUV and OES.
|
||||
private static final String VERTEX_SHADER_STRING =
|
||||
"varying vec2 interp_tc;\n"
|
||||
+ "attribute vec4 in_pos;\n"
|
||||
+ "attribute vec4 in_tc;\n"
|
||||
+ "\n"
|
||||
+ "uniform mat4 texMatrix;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
+ " gl_Position = in_pos;\n"
|
||||
+ " interp_tc = (texMatrix * in_tc).xy;\n"
|
||||
+ "}\n";
|
||||
|
||||
private static final String YUV_FRAGMENT_SHADER_STRING =
|
||||
"precision mediump float;\n"
|
||||
+ "varying vec2 interp_tc;\n"
|
||||
+ "\n"
|
||||
+ "uniform sampler2D y_tex;\n"
|
||||
+ "uniform sampler2D u_tex;\n"
|
||||
+ "uniform sampler2D v_tex;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
// CSC according to http://www.fourcc.org/fccyvrgb.php
|
||||
+ " float y = texture2D(y_tex, interp_tc).r;\n"
|
||||
+ " float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
|
||||
+ " float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
|
||||
+ " gl_FragColor = vec4(y + 1.403 * v, "
|
||||
+ " y - 0.344 * u - 0.714 * v, "
|
||||
+ " y + 1.77 * u, 1);\n"
|
||||
+ "}\n";
|
||||
|
||||
private static final String RGB_FRAGMENT_SHADER_STRING =
|
||||
"precision mediump float;\n"
|
||||
+ "varying vec2 interp_tc;\n"
|
||||
+ "\n"
|
||||
+ "uniform sampler2D rgb_tex;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
+ " gl_FragColor = texture2D(rgb_tex, interp_tc);\n"
|
||||
+ "}\n";
|
||||
|
||||
private static final String OES_FRAGMENT_SHADER_STRING =
|
||||
"#extension GL_OES_EGL_image_external : require\n"
|
||||
+ "precision mediump float;\n"
|
||||
+ "varying vec2 interp_tc;\n"
|
||||
+ "\n"
|
||||
+ "uniform samplerExternalOES oes_tex;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
+ " gl_FragColor = texture2D(oes_tex, interp_tc);\n"
|
||||
+ "}\n";
|
||||
|
||||
// Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
|
||||
// top-right.
|
||||
private static final FloatBuffer FULL_RECTANGLE_BUF =
|
||||
GlUtil.createFloatBuffer(new float[] {
|
||||
-1.0f, -1.0f, // Bottom left.
|
||||
1.0f, -1.0f, // Bottom right.
|
||||
-1.0f, 1.0f, // Top left.
|
||||
1.0f, 1.0f, // Top right.
|
||||
});
|
||||
|
||||
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
|
||||
private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
|
||||
GlUtil.createFloatBuffer(new float[] {
|
||||
0.0f, 0.0f, // Bottom left.
|
||||
1.0f, 0.0f, // Bottom right.
|
||||
0.0f, 1.0f, // Top left.
|
||||
1.0f, 1.0f // Top right.
|
||||
});
|
||||
|
||||
private static class Shader {
|
||||
public final GlShader glShader;
|
||||
public final int texMatrixLocation;
|
||||
|
||||
public Shader(String fragmentShader) {
|
||||
this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
|
||||
this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
|
||||
}
|
||||
}
|
||||
|
||||
// The keys are one of the fragments shaders above.
|
||||
private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
|
||||
|
||||
/**
|
||||
* Draw an OES texture frame with specified texture transformation matrix. Required resources are
|
||||
* allocated at the first call to this function.
|
||||
*/
|
||||
@Override
|
||||
public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
|
||||
prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
// updateTexImage() may be called from another thread in another EGL context, so we need to
|
||||
// bind/unbind the texture in each draw call so that GLES understands it's a new texture.
|
||||
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
|
||||
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
|
||||
* are allocated at the first call to this function.
|
||||
*/
|
||||
@Override
|
||||
public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
|
||||
prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
|
||||
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
// Unbind the texture as a precaution.
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Draw a YUV frame with specified texture transformation matrix. Required resources are
|
||||
* allocated at the first call to this function.
|
||||
*/
|
||||
@Override
|
||||
public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
|
||||
prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
|
||||
// Bind the textures.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
|
||||
}
|
||||
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
// Unbind the textures as a precaution.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
|
||||
}
|
||||
}
|
||||
|
||||
private void drawRectangle(int x, int y, int width, int height) {
|
||||
// Draw quad.
|
||||
GLES20.glViewport(x, y, width, height);
|
||||
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
||||
}
|
||||
|
||||
private void prepareShader(String fragmentShader, float[] texMatrix) {
|
||||
final Shader shader;
|
||||
if (shaders.containsKey(fragmentShader)) {
|
||||
shader = shaders.get(fragmentShader);
|
||||
} else {
|
||||
// Lazy allocation.
|
||||
shader = new Shader(fragmentShader);
|
||||
shaders.put(fragmentShader, shader);
|
||||
shader.glShader.useProgram();
|
||||
// Initialize fragment shader uniform values.
|
||||
if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
|
||||
GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
|
||||
GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
|
||||
GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
|
||||
} else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
|
||||
GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
|
||||
} else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
|
||||
GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
|
||||
} else {
|
||||
throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
|
||||
}
|
||||
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
|
||||
// Initialize vertex shader attributes.
|
||||
shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
|
||||
shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
|
||||
}
|
||||
shader.glShader.useProgram();
|
||||
// Copy the texture transformation matrix over.
|
||||
GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Release all GLES resources. This needs to be done manually; otherwise the resources are leaked.
|
||||
*/
|
||||
@Override
|
||||
public void release() {
|
||||
for (Shader shader : shaders.values()) {
|
||||
shader.glShader.release();
|
||||
}
|
||||
shaders.clear();
|
||||
}
|
||||
}
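
// Illustrative usage sketch: drawing an OES (SurfaceTexture) frame across the full viewport with
// an identity texture transform. A current EGL context and a valid |oesTextureId| are assumed.
//
//   final GlRectDrawer drawer = new GlRectDrawer();
//   final float[] identityMatrix = new float[16];
//   android.opengl.Matrix.setIdentityM(identityMatrix, 0);
//   drawer.drawOes(oesTextureId, identityMatrix, frameWidth, frameHeight,
//       0 /* viewportX */, 0 /* viewportY */, surfaceWidth, surfaceHeight);
//   drawer.release();  // Free the lazily created shader programs when done.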
|
||||
@@ -1,128 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
import org.webrtc.Logging;
|
||||
|
||||
import java.nio.FloatBuffer;
|
||||
|
||||
// Helper class for handling OpenGL shaders and shader programs.
|
||||
public class GlShader {
|
||||
private static final String TAG = "GlShader";
|
||||
|
||||
private static int compileShader(int shaderType, String source) {
|
||||
final int shader = GLES20.glCreateShader(shaderType);
|
||||
if (shader == 0) {
|
||||
throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
|
||||
}
|
||||
GLES20.glShaderSource(shader, source);
|
||||
GLES20.glCompileShader(shader);
|
||||
int[] compileStatus = new int[] {
|
||||
GLES20.GL_FALSE
|
||||
};
|
||||
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
|
||||
if (compileStatus[0] != GLES20.GL_TRUE) {
|
||||
Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
|
||||
GLES20.glGetShaderInfoLog(shader));
|
||||
throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
|
||||
}
|
||||
GlUtil.checkNoGLES2Error("compileShader");
|
||||
return shader;
|
||||
}
|
||||
|
||||
private int program;
|
||||
|
||||
public GlShader(String vertexSource, String fragmentSource) {
|
||||
final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
|
||||
final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
|
||||
program = GLES20.glCreateProgram();
|
||||
if (program == 0) {
|
||||
throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
|
||||
}
|
||||
GLES20.glAttachShader(program, vertexShader);
|
||||
GLES20.glAttachShader(program, fragmentShader);
|
||||
GLES20.glLinkProgram(program);
|
||||
int[] linkStatus = new int[] {
|
||||
GLES20.GL_FALSE
|
||||
};
|
||||
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
|
||||
if (linkStatus[0] != GLES20.GL_TRUE) {
|
||||
Logging.e(TAG, "Could not link program: " +
|
||||
GLES20.glGetProgramInfoLog(program));
|
||||
throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
|
||||
}
|
||||
// According to the documentation of glLinkProgram():
|
||||
// "After the link operation, applications are free to modify attached shader objects, compile
|
||||
// attached shader objects, detach shader objects, delete shader objects, and attach additional
|
||||
// shader objects. None of these operations affects the information log or the program that is
|
||||
// part of the program object."
|
||||
// But in practice, detaching shaders from the program seems to break some devices. Deleting the
|
||||
// shaders are fine however - it will delete them when they are no longer attached to a program.
|
||||
GLES20.glDeleteShader(vertexShader);
|
||||
GLES20.glDeleteShader(fragmentShader);
|
||||
GlUtil.checkNoGLES2Error("Creating GlShader");
|
||||
}
|
||||
|
||||
public int getAttribLocation(String label) {
|
||||
if (program == -1) {
|
||||
throw new RuntimeException("The program has been released");
|
||||
}
|
||||
int location = GLES20.glGetAttribLocation(program, label);
|
||||
if (location < 0) {
|
||||
throw new RuntimeException("Could not locate '" + label + "' in program");
|
||||
}
|
||||
return location;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable and upload a vertex array for attribute |label|. The vertex data is specified in
|
||||
* |buffer| with |dimension| number of components per vertex.
|
||||
*/
|
||||
public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
|
||||
if (program == -1) {
|
||||
throw new RuntimeException("The program has been released");
|
||||
}
|
||||
int location = getAttribLocation(label);
|
||||
GLES20.glEnableVertexAttribArray(location);
|
||||
GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, 0, buffer);
|
||||
GlUtil.checkNoGLES2Error("setVertexAttribArray");
|
||||
}
|
||||
|
||||
public int getUniformLocation(String label) {
|
||||
if (program == -1) {
|
||||
throw new RuntimeException("The program has been released");
|
||||
}
|
||||
int location = GLES20.glGetUniformLocation(program, label);
|
||||
if (location < 0) {
|
||||
throw new RuntimeException("Could not locate uniform '" + label + "' in program");
|
||||
}
|
||||
return location;
|
||||
}
|
||||
|
||||
public void useProgram() {
|
||||
if (program == -1) {
|
||||
throw new RuntimeException("The program has been released");
|
||||
}
|
||||
GLES20.glUseProgram(program);
|
||||
GlUtil.checkNoGLES2Error("glUseProgram");
|
||||
}
|
||||
|
||||
public void release() {
|
||||
Logging.d(TAG, "Deleting shader.");
|
||||
// Delete program, automatically detaching any shaders from it.
|
||||
if (program != -1) {
|
||||
GLES20.glDeleteProgram(program);
|
||||
program = -1;
|
||||
}
|
||||
}
|
||||
}
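
// Illustrative usage sketch: compiling a program with the helper above and drawing a full-screen
// quad. |vertexSource| and |fragmentSource| are hypothetical GLSL strings (with an "in_pos"
// attribute), not from this file.
//
//   final GlShader shader = new GlShader(vertexSource, fragmentSource);
//   shader.useProgram();
//   shader.setVertexAttribArray("in_pos", 2, GlUtil.createFloatBuffer(
//       new float[] {-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f}));
//   GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
//   shader.release();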
|
||||
@@ -1,125 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
/**
|
||||
* Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
|
||||
* buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
|
||||
* conversion.
|
||||
*/
|
||||
// TODO(magjed): Add unittests for this class.
|
||||
public class GlTextureFrameBuffer {
|
||||
private final int frameBufferId;
|
||||
private final int textureId;
|
||||
private final int pixelFormat;
|
||||
private int width;
|
||||
private int height;
|
||||
|
||||
/**
|
||||
* Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
|
||||
* when calling this function. The framebuffer is not complete until setSize() is called.
|
||||
*/
|
||||
public GlTextureFrameBuffer(int pixelFormat) {
|
||||
switch (pixelFormat) {
|
||||
case GLES20.GL_LUMINANCE:
|
||||
case GLES20.GL_RGB:
|
||||
case GLES20.GL_RGBA:
|
||||
this.pixelFormat = pixelFormat;
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
|
||||
}
|
||||
|
||||
textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
|
||||
this.width = 0;
|
||||
this.height = 0;
|
||||
|
||||
// Create framebuffer object and bind it.
|
||||
final int frameBuffers[] = new int[1];
|
||||
GLES20.glGenFramebuffers(1, frameBuffers, 0);
|
||||
frameBufferId = frameBuffers[0];
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
|
||||
GlUtil.checkNoGLES2Error("Generate framebuffer");
|
||||
|
||||
// Attach the texture to the framebuffer as color attachment.
|
||||
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
|
||||
GLES20.GL_TEXTURE_2D, textureId, 0);
|
||||
GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
|
||||
|
||||
// Restore normal framebuffer.
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* (Re)allocate texture. Will do nothing if the requested size equals the current size. An
|
||||
* EGLContext must be bound on the current thread when calling this function. Must be called at
|
||||
* least once before using the framebuffer. May be called multiple times to change size.
|
||||
*/
|
||||
public void setSize(int width, int height) {
|
||||
if (width == 0 || height == 0) {
|
||||
throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
|
||||
}
|
||||
if (width == this.width && height == this.height) {
|
||||
return;
|
||||
}
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
|
||||
// Bind our framebuffer.
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
|
||||
GlUtil.checkNoGLES2Error("glBindFramebuffer");
|
||||
|
||||
// Allocate texture.
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
|
||||
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
|
||||
GLES20.GL_UNSIGNED_BYTE, null);
|
||||
|
||||
// Check that the framebuffer is in a good state.
|
||||
final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
|
||||
if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
|
||||
throw new IllegalStateException("Framebuffer not complete, status: " + status);
|
||||
}
|
||||
|
||||
// Restore normal framebuffer.
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
public int getWidth() {
|
||||
return width;
|
||||
}
|
||||
|
||||
public int getHeight() {
|
||||
return height;
|
||||
}
|
||||
|
||||
public int getFrameBufferId() {
|
||||
return frameBufferId;
|
||||
}
|
||||
|
||||
public int getTextureId() {
|
||||
return textureId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
|
||||
* this function. This object should not be used after this call.
|
||||
*/
|
||||
public void release() {
|
||||
GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
|
||||
GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
|
||||
width = 0;
|
||||
height = 0;
|
||||
}
|
||||
}
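
// Illustrative usage sketch: rendering into the texture-backed framebuffer above and then using
// the result as an ordinary texture. A current EGL context is assumed.
//
//   final GlTextureFrameBuffer textureFrameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
//   textureFrameBuffer.setSize(width, height);
//   GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureFrameBuffer.getFrameBufferId());
//   // ... draw here; the output ends up in textureFrameBuffer.getTextureId() ...
//   GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
//   textureFrameBuffer.release();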
|
||||
@@ -1,58 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.FloatBuffer;
|
||||
|
||||
/**
|
||||
* Some OpenGL static utility functions.
|
||||
*/
|
||||
public class GlUtil {
|
||||
private GlUtil() {}
|
||||
|
||||
// Assert that no OpenGL ES 2.0 error has been raised.
|
||||
public static void checkNoGLES2Error(String msg) {
|
||||
int error = GLES20.glGetError();
|
||||
if (error != GLES20.GL_NO_ERROR) {
|
||||
throw new RuntimeException(msg + ": GLES20 error: " + error);
|
||||
}
|
||||
}
|
||||
|
||||
public static FloatBuffer createFloatBuffer(float[] coords) {
|
||||
// Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
|
||||
ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
|
||||
bb.order(ByteOrder.nativeOrder());
|
||||
FloatBuffer fb = bb.asFloatBuffer();
|
||||
fb.put(coords);
|
||||
fb.position(0);
|
||||
return fb;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate texture with standard parameters.
|
||||
*/
|
||||
public static int generateTexture(int target) {
|
||||
final int textureArray[] = new int[1];
|
||||
GLES20.glGenTextures(1, textureArray, 0);
|
||||
final int textureId = textureArray[0];
|
||||
GLES20.glBindTexture(target, textureId);
|
||||
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
|
||||
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
|
||||
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
|
||||
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
|
||||
checkNoGLES2Error("generateTexture");
|
||||
return textureId;
|
||||
}
|
||||
}
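For orientation, a minimal sketch of how these helpers are typically combined; the surrounding GL thread and EGL-context setup is assumed and not part of this file:

// Sketch only: assumes a valid EGL context is already current on this thread.
int textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
FloatBuffer vertexBuffer = GlUtil.createFloatBuffer(new float[] {
    -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f});
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
GlUtil.checkNoGLES2Error("texture setup");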
|
||||
@ -1,252 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.NetworkInformation;
|
||||
|
||||
import org.webrtc.Logging;
|
||||
|
||||
import android.content.Context;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Borrowed from Chromium's src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java
|
||||
*
|
||||
* Triggers updates to the underlying network state from OS networking events.
|
||||
*
|
||||
* WARNING: This class is not thread-safe.
|
||||
*/
|
||||
public class NetworkMonitor {
|
||||
/**
|
||||
* Alerted when the connection type of the network changes.
|
||||
* The alert is fired on the UI thread.
|
||||
*/
|
||||
public interface NetworkObserver {
|
||||
public void onConnectionTypeChanged(ConnectionType connectionType);
|
||||
}
|
||||
|
||||
private static final String TAG = "NetworkMonitor";
|
||||
private static NetworkMonitor instance;
|
||||
|
||||
private final Context applicationContext;
|
||||
|
||||
// Native observers of the connection type changes.
|
||||
private final ArrayList<Long> nativeNetworkObservers;
|
||||
// Java observers of the connection type changes.
|
||||
private final ArrayList<NetworkObserver> networkObservers;
|
||||
|
||||
// Object that detects the connection type changes.
|
||||
private NetworkMonitorAutoDetect autoDetector;
|
||||
|
||||
private ConnectionType currentConnectionType = ConnectionType.CONNECTION_UNKNOWN;
|
||||
|
||||
private NetworkMonitor(Context context) {
|
||||
assertIsTrue(context != null);
|
||||
applicationContext =
|
||||
context.getApplicationContext() == null ? context : context.getApplicationContext();
|
||||
|
||||
nativeNetworkObservers = new ArrayList<Long>();
|
||||
networkObservers = new ArrayList<NetworkObserver>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the singleton once.
|
||||
* Called from the native code.
|
||||
*/
|
||||
public static NetworkMonitor init(Context context) {
|
||||
if (!isInitialized()) {
|
||||
instance = new NetworkMonitor(context);
|
||||
}
|
||||
return instance;
|
||||
}
|
||||
|
||||
public static boolean isInitialized() {
|
||||
return instance != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the singleton instance.
|
||||
*/
|
||||
public static NetworkMonitor getInstance() {
|
||||
return instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enables auto detection of the current network state based on notifications from the system.
|
||||
* Note that passing true here requires that the embedding app have the platform ACCESS_NETWORK_STATE
|
||||
* permission.
|
||||
*
|
||||
* @param shouldAutoDetect true if the NetworkMonitor should listen for system changes in
|
||||
* network connectivity.
|
||||
*/
|
||||
public static void setAutoDetectConnectivityState(boolean shouldAutoDetect) {
|
||||
getInstance().setAutoDetectConnectivityStateInternal(shouldAutoDetect);
|
||||
}
|
||||
|
||||
private static void assertIsTrue(boolean condition) {
|
||||
if (!condition) {
|
||||
throw new AssertionError("Expected to be true");
|
||||
}
|
||||
}
|
||||
|
||||
// Called by the native code.
|
||||
private void startMonitoring(long nativeObserver) {
|
||||
Logging.d(TAG, "Start monitoring from native observer " + nativeObserver);
|
||||
nativeNetworkObservers.add(nativeObserver);
|
||||
setAutoDetectConnectivityStateInternal(true);
|
||||
}
|
||||
|
||||
// Called by the native code.
|
||||
private void stopMonitoring(long nativeObserver) {
|
||||
Logging.d(TAG, "Stop monitoring from native observer " + nativeObserver);
|
||||
setAutoDetectConnectivityStateInternal(false);
|
||||
nativeNetworkObservers.remove(nativeObserver);
|
||||
}
|
||||
|
||||
private ConnectionType getCurrentConnectionType() {
|
||||
return currentConnectionType;
|
||||
}
|
||||
|
||||
private int getCurrentDefaultNetId() {
|
||||
return autoDetector == null ? INVALID_NET_ID : autoDetector.getDefaultNetId();
|
||||
}
|
||||
|
||||
private void destroyAutoDetector() {
|
||||
if (autoDetector != null) {
|
||||
autoDetector.destroy();
|
||||
autoDetector = null;
|
||||
}
|
||||
}
|
||||
|
||||
private void setAutoDetectConnectivityStateInternal(boolean shouldAutoDetect) {
|
||||
if (!shouldAutoDetect) {
|
||||
destroyAutoDetector();
|
||||
return;
|
||||
}
|
||||
if (autoDetector == null) {
|
||||
autoDetector = new NetworkMonitorAutoDetect(
|
||||
new NetworkMonitorAutoDetect.Observer() {
|
||||
|
||||
@Override
|
||||
public void onConnectionTypeChanged(ConnectionType newConnectionType) {
|
||||
updateCurrentConnectionType(newConnectionType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNetworkConnect(NetworkInformation networkInfo) {
|
||||
notifyObserversOfNetworkConnect(networkInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNetworkDisconnect(int networkHandle) {
|
||||
notifyObserversOfNetworkDisconnect(networkHandle);
|
||||
}
|
||||
},
|
||||
applicationContext);
|
||||
final NetworkMonitorAutoDetect.NetworkState networkState =
|
||||
autoDetector.getCurrentNetworkState();
|
||||
updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
|
||||
updateActiveNetworkList();
|
||||
}
|
||||
}
|
||||
|
||||
private void updateCurrentConnectionType(ConnectionType newConnectionType) {
|
||||
currentConnectionType = newConnectionType;
|
||||
notifyObserversOfConnectionTypeChange(newConnectionType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Alerts all observers of a connection change.
|
||||
*/
|
||||
private void notifyObserversOfConnectionTypeChange(ConnectionType newConnectionType) {
|
||||
for (long nativeObserver : nativeNetworkObservers) {
|
||||
nativeNotifyConnectionTypeChanged(nativeObserver);
|
||||
}
|
||||
for (NetworkObserver observer : networkObservers) {
|
||||
observer.onConnectionTypeChanged(newConnectionType);
|
||||
}
|
||||
}
|
||||
|
||||
private void notifyObserversOfNetworkConnect(NetworkInformation networkInfo) {
|
||||
for (long nativeObserver : nativeNetworkObservers) {
|
||||
nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
|
||||
}
|
||||
}
|
||||
|
||||
private void notifyObserversOfNetworkDisconnect(int networkHandle) {
|
||||
for (long nativeObserver : nativeNetworkObservers) {
|
||||
nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
|
||||
}
|
||||
}
|
||||
|
||||
private void updateActiveNetworkList() {
|
||||
List<NetworkInformation> networkInfoList = autoDetector.getActiveNetworkList();
|
||||
if (networkInfoList == null || networkInfoList.size() == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
NetworkInformation[] networkInfos = new NetworkInformation[networkInfoList.size()];
|
||||
networkInfos = networkInfoList.toArray(networkInfos);
|
||||
for (long nativeObserver : nativeNetworkObservers) {
|
||||
nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds an observer for any connection type changes.
|
||||
*/
|
||||
public static void addNetworkObserver(NetworkObserver observer) {
|
||||
getInstance().addNetworkObserverInternal(observer);
|
||||
}
|
||||
|
||||
private void addNetworkObserverInternal(NetworkObserver observer) {
|
||||
networkObservers.add(observer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes an observer for any connection type changes.
|
||||
*/
|
||||
public static void removeNetworkObserver(NetworkObserver observer) {
|
||||
getInstance().removeNetworkObserverInternal(observer);
|
||||
}
|
||||
|
||||
private void removeNetworkObserverInternal(NetworkObserver observer) {
|
||||
networkObservers.remove(observer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if there currently is connectivity.
|
||||
*/
|
||||
public static boolean isOnline() {
|
||||
ConnectionType connectionType = getInstance().getCurrentConnectionType();
|
||||
return connectionType != ConnectionType.CONNECTION_NONE;
|
||||
}
|
||||
|
||||
private native void nativeNotifyConnectionTypeChanged(long nativePtr);
|
||||
private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
|
||||
private native void nativeNotifyOfNetworkDisconnect(long nativePtr, int networkHandle);
|
||||
private native void nativeNotifyOfActiveNetworkList(long nativePtr,
|
||||
NetworkInformation[] networkInfos);
|
||||
|
||||
// For testing only.
|
||||
static void resetInstanceForTests(Context context) {
|
||||
instance = new NetworkMonitor(context);
|
||||
}
|
||||
|
||||
// For testing only.
|
||||
public static NetworkMonitorAutoDetect getAutoDetectorForTest() {
|
||||
return getInstance().autoDetector;
|
||||
}
|
||||
}
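For reference, a minimal sketch of how the Java-side observer API above is used. init() is normally driven from native code, so calling it directly is for illustration only, and `appContext` is an assumed application Context:

// Sketch only: appContext is an assumed android.content.Context.
NetworkMonitor.init(appContext);
NetworkMonitor.setAutoDetectConnectivityState(true); // Needs ACCESS_NETWORK_STATE.
NetworkMonitor.addNetworkObserver(new NetworkMonitor.NetworkObserver() {
  @Override
  public void onConnectionTypeChanged(NetworkMonitorAutoDetect.ConnectionType connectionType) {
    Logging.d("App", "Connection type changed to " + connectionType);
  }
});
boolean online = NetworkMonitor.isOnline();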
|
||||
@ -1,622 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static android.net.NetworkCapabilities.NET_CAPABILITY_INTERNET;
|
||||
import static android.net.NetworkCapabilities.TRANSPORT_CELLULAR;
|
||||
|
||||
|
||||
import org.webrtc.Logging;
|
||||
|
||||
import android.Manifest.permission;
|
||||
import android.annotation.SuppressLint;
|
||||
import android.content.BroadcastReceiver;
|
||||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.content.IntentFilter;
|
||||
import android.content.pm.PackageManager;
|
||||
import android.net.ConnectivityManager;
|
||||
import android.net.ConnectivityManager.NetworkCallback;
|
||||
import android.net.LinkAddress;
|
||||
import android.net.LinkProperties;
|
||||
import android.net.Network;
|
||||
import android.net.NetworkCapabilities;
|
||||
import android.net.NetworkInfo;
|
||||
import android.net.NetworkRequest;
|
||||
import android.net.wifi.WifiInfo;
|
||||
import android.net.wifi.WifiManager;
|
||||
import android.os.Build;
|
||||
import android.telephony.TelephonyManager;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Borrowed from Chromium's
|
||||
* src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
|
||||
*
|
||||
* Used by the NetworkMonitor to listen to platform changes in connectivity.
|
||||
* Note that use of this class requires that the app have the platform
|
||||
* ACCESS_NETWORK_STATE permission.
|
||||
*/
|
||||
public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
public static enum ConnectionType {
|
||||
CONNECTION_UNKNOWN,
|
||||
CONNECTION_ETHERNET,
|
||||
CONNECTION_WIFI,
|
||||
CONNECTION_4G,
|
||||
CONNECTION_3G,
|
||||
CONNECTION_2G,
|
||||
CONNECTION_UNKNOWN_CELLULAR,
|
||||
CONNECTION_BLUETOOTH,
|
||||
CONNECTION_NONE
|
||||
}
|
||||
|
||||
public static class IPAddress {
|
||||
public final byte[] address;
|
||||
public IPAddress(byte[] address) {
|
||||
this.address = address;
|
||||
}
|
||||
}
|
||||
|
||||
/** Java version of NetworkMonitor.NetworkInformation */
|
||||
public static class NetworkInformation {
|
||||
public final String name;
|
||||
public final ConnectionType type;
|
||||
public final int handle;
|
||||
public final IPAddress[] ipAddresses;
|
||||
public NetworkInformation(String name, ConnectionType type, int handle,
|
||||
IPAddress[] addresses) {
|
||||
this.name = name;
|
||||
this.type = type;
|
||||
this.handle = handle;
|
||||
this.ipAddresses = addresses;
|
||||
}
|
||||
}
|
||||
|
||||
static class NetworkState {
|
||||
private final boolean connected;
|
||||
// Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
|
||||
// further divided into 2G, 3G, or 4G from the subtype.
|
||||
private final int type;
|
||||
// Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs.
|
||||
// Will be useful to find the maximum bandwidth.
|
||||
private final int subtype;
|
||||
|
||||
public NetworkState(boolean connected, int type, int subtype) {
|
||||
this.connected = connected;
|
||||
this.type = type;
|
||||
this.subtype = subtype;
|
||||
}
|
||||
|
||||
public boolean isConnected() {
|
||||
return connected;
|
||||
}
|
||||
|
||||
public int getNetworkType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public int getNetworkSubType() {
|
||||
return subtype;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* The methods in this class get called when the network changes if the callback
|
||||
* is registered with a proper network request. It is only available in Android Lollipop
|
||||
* and above.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
private class SimpleNetworkCallback extends NetworkCallback {
|
||||
|
||||
@Override
|
||||
public void onAvailable(Network network) {
|
||||
Logging.d(TAG, "Network becomes available: " + network.toString());
|
||||
onNetworkChanged(network);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCapabilitiesChanged(
|
||||
Network network, NetworkCapabilities networkCapabilities) {
|
||||
// A capabilities change may indicate the ConnectionType has changed,
|
||||
// so forward the new NetworkInformation along to the observer.
|
||||
Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
|
||||
onNetworkChanged(network);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) {
|
||||
// A link property change may indicate the IP address changes.
|
||||
// so forward the new NetworkInformation to the observer.
|
||||
Logging.d(TAG, "link properties changed: " + linkProperties.toString());
|
||||
onNetworkChanged(network);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLosing(Network network, int maxMsToLive) {
|
||||
// Signals that the network is about to be lost in maxMsToLive milliseconds.
|
||||
// We may use this signal later.
|
||||
Logging.d(TAG,
|
||||
"Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLost(Network network) {
|
||||
Logging.d(TAG, "Network " + network.toString() + " is disconnected");
|
||||
observer.onNetworkDisconnect(networkToNetId(network));
|
||||
}
|
||||
|
||||
private void onNetworkChanged(Network network) {
|
||||
NetworkInformation networkInformation = connectivityManagerDelegate.networkToInfo(network);
|
||||
if (networkInformation != null) {
|
||||
observer.onNetworkConnect(networkInformation);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Queries the ConnectivityManager for information about the current connection. */
|
||||
static class ConnectivityManagerDelegate {
|
||||
/**
|
||||
* Note: In some rare Android systems connectivityManager is null. We handle that
|
||||
* gracefully below.
|
||||
*/
|
||||
private final ConnectivityManager connectivityManager;
|
||||
|
||||
ConnectivityManagerDelegate(Context context) {
|
||||
connectivityManager =
|
||||
(ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
|
||||
}
|
||||
|
||||
// For testing.
|
||||
ConnectivityManagerDelegate() {
|
||||
// All the methods below should be overridden.
|
||||
connectivityManager = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns connection type and status information about the current
|
||||
* default network.
|
||||
*/
|
||||
NetworkState getNetworkState() {
|
||||
if (connectivityManager == null) {
|
||||
return new NetworkState(false, -1, -1);
|
||||
}
|
||||
return getNetworkState(connectivityManager.getActiveNetworkInfo());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns connection type and status information about |network|.
|
||||
* Only callable on Lollipop and newer releases.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
NetworkState getNetworkState(Network network) {
|
||||
if (connectivityManager == null) {
|
||||
return new NetworkState(false, -1, -1);
|
||||
}
|
||||
return getNetworkState(connectivityManager.getNetworkInfo(network));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns connection type and status information gleaned from networkInfo.
|
||||
*/
|
||||
NetworkState getNetworkState(NetworkInfo networkInfo) {
|
||||
if (networkInfo == null || !networkInfo.isConnected()) {
|
||||
return new NetworkState(false, -1, -1);
|
||||
}
|
||||
return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all connected networks.
|
||||
* Only callable on Lollipop and newer releases.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
Network[] getAllNetworks() {
|
||||
if (connectivityManager == null) {
|
||||
return new Network[0];
|
||||
}
|
||||
return connectivityManager.getAllNetworks();
|
||||
}
|
||||
|
||||
List<NetworkInformation> getActiveNetworkList() {
|
||||
if (!supportNetworkCallback()) {
|
||||
return null;
|
||||
}
|
||||
ArrayList<NetworkInformation> netInfoList = new ArrayList<NetworkInformation>();
|
||||
for (Network network : getAllNetworks()) {
|
||||
NetworkInformation info = networkToInfo(network);
|
||||
if (info != null) {
|
||||
netInfoList.add(info);
|
||||
}
|
||||
}
|
||||
return netInfoList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the NetID of the current default network. Returns
|
||||
* INVALID_NET_ID if no current default network connected.
|
||||
* Only callable on Lollipop and newer releases.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
int getDefaultNetId() {
|
||||
if (!supportNetworkCallback()) {
|
||||
return INVALID_NET_ID;
|
||||
}
|
||||
// Android Lollipop had no API to get the default network; only an
|
||||
// API to return the NetworkInfo for the default network. To
|
||||
// determine the default network one can find the network with
|
||||
// type matching that of the default network.
|
||||
final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo();
|
||||
if (defaultNetworkInfo == null) {
|
||||
return INVALID_NET_ID;
|
||||
}
|
||||
final Network[] networks = getAllNetworks();
|
||||
int defaultNetId = INVALID_NET_ID;
|
||||
for (Network network : networks) {
|
||||
if (!hasInternetCapability(network)) {
|
||||
continue;
|
||||
}
|
||||
final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
|
||||
if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) {
|
||||
// There should not be multiple connected networks of the
|
||||
// same type. At least as of Android Marshmallow this is
|
||||
// not supported. If this becomes supported this assertion
|
||||
// may trigger. At that point we could consider using
|
||||
// ConnectivityManager.getDefaultNetwork() though this
|
||||
// may give confusing results with VPNs and is only
|
||||
// available with Android Marshmallow.
|
||||
assert defaultNetId == INVALID_NET_ID;
|
||||
defaultNetId = networkToNetId(network);
|
||||
}
|
||||
}
|
||||
return defaultNetId;
|
||||
}
|
||||
|
||||
@SuppressLint("NewApi")
|
||||
private NetworkInformation networkToInfo(Network network) {
|
||||
LinkProperties linkProperties = connectivityManager.getLinkProperties(network);
|
||||
// getLinkProperties will return null if the network is unknown.
|
||||
if (linkProperties == null) {
|
||||
Logging.w(TAG, "Detected unknown network: " + network.toString());
|
||||
return null;
|
||||
}
|
||||
if (linkProperties.getInterfaceName() == null) {
|
||||
Logging.w(TAG, "Null interface name for network " + network.toString());
|
||||
return null;
|
||||
}
|
||||
|
||||
NetworkState networkState = getNetworkState(network);
|
||||
ConnectionType connectionType = getConnectionType(networkState);
|
||||
if (connectionType == ConnectionType.CONNECTION_NONE) {
|
||||
// This may not be an error. The OS may signal a network event with connection type
|
||||
// NONE when the network disconnects.
|
||||
Logging.d(TAG, "Network " + network.toString() + " is disconnected");
|
||||
return null;
|
||||
}
|
||||
|
||||
// Some Android devices may return a CONNECTION_UNKNOWN_CELLULAR or CONNECTION_UNKNOWN type,
|
||||
// which appears to be usable. Just log them here.
|
||||
if (connectionType == ConnectionType.CONNECTION_UNKNOWN
|
||||
|| connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
|
||||
Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
|
||||
+ " because it has type " + networkState.getNetworkType()
|
||||
+ " and subtype " + networkState.getNetworkSubType());
|
||||
}
|
||||
|
||||
NetworkInformation networkInformation = new NetworkInformation(
|
||||
linkProperties.getInterfaceName(),
|
||||
connectionType,
|
||||
networkToNetId(network),
|
||||
getIPAddresses(linkProperties));
|
||||
return networkInformation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if {@code network} can provide Internet access. Can be used to
|
||||
* ignore specialized networks (e.g. IMS, FOTA).
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
boolean hasInternetCapability(Network network) {
|
||||
if (connectivityManager == null) {
|
||||
return false;
|
||||
}
|
||||
final NetworkCapabilities capabilities =
|
||||
connectivityManager.getNetworkCapabilities(network);
|
||||
return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
|
||||
}
|
||||
|
||||
/** Only callable on Lollipop and newer releases. */
|
||||
@SuppressLint("NewApi")
|
||||
public void registerNetworkCallback(NetworkCallback networkCallback) {
|
||||
connectivityManager.registerNetworkCallback(
|
||||
new NetworkRequest.Builder().addCapability(NET_CAPABILITY_INTERNET).build(),
|
||||
networkCallback);
|
||||
}
|
||||
|
||||
/** Only callable on Lollipop and newer releases. */
|
||||
@SuppressLint("NewApi")
|
||||
public void requestMobileNetwork(NetworkCallback networkCallback) {
|
||||
NetworkRequest.Builder builder = new NetworkRequest.Builder();
|
||||
builder.addCapability(NET_CAPABILITY_INTERNET).addTransportType(TRANSPORT_CELLULAR);
|
||||
connectivityManager.requestNetwork(builder.build(), networkCallback);
|
||||
}
|
||||
|
||||
@SuppressLint("NewApi")
|
||||
IPAddress[] getIPAddresses(LinkProperties linkProperties) {
|
||||
IPAddress[] ipAddresses = new IPAddress[linkProperties.getLinkAddresses().size()];
|
||||
int i = 0;
|
||||
for (LinkAddress linkAddress : linkProperties.getLinkAddresses()) {
|
||||
ipAddresses[i] = new IPAddress(linkAddress.getAddress().getAddress());
|
||||
++i;
|
||||
}
|
||||
return ipAddresses;
|
||||
}
|
||||
|
||||
@SuppressLint("NewApi")
|
||||
public void releaseCallback(NetworkCallback networkCallback) {
|
||||
if (supportNetworkCallback()) {
|
||||
Logging.d(TAG, "Unregister network callback");
|
||||
connectivityManager.unregisterNetworkCallback(networkCallback);
|
||||
}
|
||||
}
|
||||
|
||||
public boolean supportNetworkCallback() {
|
||||
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && connectivityManager != null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/** Queries the WifiManager for SSID of the current Wifi connection. */
|
||||
static class WifiManagerDelegate {
|
||||
private final Context context;
|
||||
WifiManagerDelegate(Context context) {
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
// For testing.
|
||||
WifiManagerDelegate() {
|
||||
// All the methods below should be overridden.
|
||||
context = null;
|
||||
}
|
||||
|
||||
String getWifiSSID() {
|
||||
final Intent intent = context.registerReceiver(null,
|
||||
new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
|
||||
if (intent != null) {
|
||||
final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
|
||||
if (wifiInfo != null) {
|
||||
final String ssid = wifiInfo.getSSID();
|
||||
if (ssid != null) {
|
||||
return ssid;
|
||||
}
|
||||
}
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static final int INVALID_NET_ID = -1;
|
||||
private static final String TAG = "NetworkMonitorAutoDetect";
|
||||
|
||||
// Observer for the connection type change.
|
||||
private final Observer observer;
|
||||
private final IntentFilter intentFilter;
|
||||
private final Context context;
|
||||
// Used to request a mobile network. It does nothing except keep the callback
// so the request can be released later.
|
||||
private final NetworkCallback mobileNetworkCallback;
|
||||
// Used to receive updates on all networks.
|
||||
private final NetworkCallback allNetworkCallback;
|
||||
// connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing.
|
||||
private ConnectivityManagerDelegate connectivityManagerDelegate;
|
||||
private WifiManagerDelegate wifiManagerDelegate;
|
||||
|
||||
private boolean isRegistered;
|
||||
private ConnectionType connectionType;
|
||||
private String wifiSSID;
|
||||
|
||||
/**
|
||||
* Observer interface by which observer is notified of network changes.
|
||||
*/
|
||||
public static interface Observer {
|
||||
/**
|
||||
* Called when default network changes.
|
||||
*/
|
||||
public void onConnectionTypeChanged(ConnectionType newConnectionType);
|
||||
public void onNetworkConnect(NetworkInformation networkInfo);
|
||||
public void onNetworkDisconnect(int networkHandle);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
public NetworkMonitorAutoDetect(Observer observer, Context context) {
|
||||
this.observer = observer;
|
||||
this.context = context;
|
||||
connectivityManagerDelegate = new ConnectivityManagerDelegate(context);
|
||||
wifiManagerDelegate = new WifiManagerDelegate(context);
|
||||
|
||||
final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
|
||||
connectionType = getConnectionType(networkState);
|
||||
wifiSSID = getWifiSSID(networkState);
|
||||
intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
|
||||
|
||||
registerReceiver();
|
||||
if (connectivityManagerDelegate.supportNetworkCallback()) {
|
||||
// On Android 6.0.0, the WRITE_SETTINGS permission is required for requestNetwork,
// so the call below can fail with a SecurityException. This was fixed in Android 6.0.1.
|
||||
NetworkCallback tempNetworkCallback = new NetworkCallback();
|
||||
try {
|
||||
connectivityManagerDelegate.requestMobileNetwork(tempNetworkCallback);
|
||||
} catch (java.lang.SecurityException e) {
|
||||
Logging.w(TAG, "Unable to obtain permission to request a cellular network.");
|
||||
tempNetworkCallback = null;
|
||||
}
|
||||
mobileNetworkCallback = tempNetworkCallback;
|
||||
allNetworkCallback = new SimpleNetworkCallback();
|
||||
connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback);
|
||||
} else {
|
||||
mobileNetworkCallback = null;
|
||||
allNetworkCallback = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows overriding the ConnectivityManagerDelegate for tests.
|
||||
*/
|
||||
void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) {
|
||||
connectivityManagerDelegate = delegate;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows overriding the WifiManagerDelegate for tests.
|
||||
*/
|
||||
void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) {
|
||||
wifiManagerDelegate = delegate;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the object has registered to receive network connectivity intents.
|
||||
* Visible for testing.
|
||||
*/
|
||||
boolean isReceiverRegisteredForTesting() {
|
||||
return isRegistered;
|
||||
}
|
||||
|
||||
List<NetworkInformation> getActiveNetworkList() {
|
||||
return connectivityManagerDelegate.getActiveNetworkList();
|
||||
}
|
||||
|
||||
public void destroy() {
|
||||
if (allNetworkCallback != null) {
|
||||
connectivityManagerDelegate.releaseCallback(allNetworkCallback);
|
||||
}
|
||||
if (mobileNetworkCallback != null) {
|
||||
connectivityManagerDelegate.releaseCallback(mobileNetworkCallback);
|
||||
}
|
||||
unregisterReceiver();
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a BroadcastReceiver in the given context.
|
||||
*/
|
||||
private void registerReceiver() {
|
||||
if (isRegistered) return;
|
||||
|
||||
isRegistered = true;
|
||||
context.registerReceiver(this, intentFilter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Unregisters the BroadcastReceiver in the given context.
|
||||
*/
|
||||
private void unregisterReceiver() {
|
||||
if (!isRegistered) return;
|
||||
|
||||
isRegistered = false;
|
||||
context.unregisterReceiver(this);
|
||||
}
|
||||
|
||||
public NetworkState getCurrentNetworkState() {
|
||||
return connectivityManagerDelegate.getNetworkState();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns NetID of device's current default connected network used for
|
||||
* communication.
|
||||
* Only implemented on Lollipop and newer releases, returns INVALID_NET_ID
|
||||
* when not implemented.
|
||||
*/
|
||||
public int getDefaultNetId() {
|
||||
return connectivityManagerDelegate.getDefaultNetId();
|
||||
}
|
||||
|
||||
public static ConnectionType getConnectionType(NetworkState networkState) {
|
||||
if (!networkState.isConnected()) {
|
||||
return ConnectionType.CONNECTION_NONE;
|
||||
}
|
||||
|
||||
switch (networkState.getNetworkType()) {
|
||||
case ConnectivityManager.TYPE_ETHERNET:
|
||||
return ConnectionType.CONNECTION_ETHERNET;
|
||||
case ConnectivityManager.TYPE_WIFI:
|
||||
return ConnectionType.CONNECTION_WIFI;
|
||||
case ConnectivityManager.TYPE_WIMAX:
|
||||
return ConnectionType.CONNECTION_4G;
|
||||
case ConnectivityManager.TYPE_BLUETOOTH:
|
||||
return ConnectionType.CONNECTION_BLUETOOTH;
|
||||
case ConnectivityManager.TYPE_MOBILE:
|
||||
// Use information from TelephonyManager to classify the connection.
|
||||
switch (networkState.getNetworkSubType()) {
|
||||
case TelephonyManager.NETWORK_TYPE_GPRS:
|
||||
case TelephonyManager.NETWORK_TYPE_EDGE:
|
||||
case TelephonyManager.NETWORK_TYPE_CDMA:
|
||||
case TelephonyManager.NETWORK_TYPE_1xRTT:
|
||||
case TelephonyManager.NETWORK_TYPE_IDEN:
|
||||
return ConnectionType.CONNECTION_2G;
|
||||
case TelephonyManager.NETWORK_TYPE_UMTS:
|
||||
case TelephonyManager.NETWORK_TYPE_EVDO_0:
|
||||
case TelephonyManager.NETWORK_TYPE_EVDO_A:
|
||||
case TelephonyManager.NETWORK_TYPE_HSDPA:
|
||||
case TelephonyManager.NETWORK_TYPE_HSUPA:
|
||||
case TelephonyManager.NETWORK_TYPE_HSPA:
|
||||
case TelephonyManager.NETWORK_TYPE_EVDO_B:
|
||||
case TelephonyManager.NETWORK_TYPE_EHRPD:
|
||||
case TelephonyManager.NETWORK_TYPE_HSPAP:
|
||||
return ConnectionType.CONNECTION_3G;
|
||||
case TelephonyManager.NETWORK_TYPE_LTE:
|
||||
return ConnectionType.CONNECTION_4G;
|
||||
default:
|
||||
return ConnectionType.CONNECTION_UNKNOWN_CELLULAR;
|
||||
}
|
||||
default:
|
||||
return ConnectionType.CONNECTION_UNKNOWN;
|
||||
}
|
||||
}
|
||||
|
||||
private String getWifiSSID(NetworkState networkState) {
|
||||
if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI) return "";
|
||||
return wifiManagerDelegate.getWifiSSID();
|
||||
}
|
||||
|
||||
// BroadcastReceiver
|
||||
@Override
|
||||
public void onReceive(Context context, Intent intent) {
|
||||
final NetworkState networkState = getCurrentNetworkState();
|
||||
if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) {
|
||||
connectionTypeChanged(networkState);
|
||||
}
|
||||
}
|
||||
|
||||
private void connectionTypeChanged(NetworkState networkState) {
|
||||
ConnectionType newConnectionType = getConnectionType(networkState);
|
||||
String newWifiSSID = getWifiSSID(networkState);
|
||||
if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return;
|
||||
|
||||
connectionType = newConnectionType;
|
||||
wifiSSID = newWifiSSID;
|
||||
Logging.d(TAG, "Network connectivity changed, type is: " + connectionType);
|
||||
observer.onConnectionTypeChanged(newConnectionType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts NetID of network. Only available on Lollipop and newer releases.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
private static int networkToNetId(Network network) {
|
||||
// NOTE(pauljensen): This depends on Android framework implementation details.
|
||||
// Fortunately this functionality is unlikely to ever change.
|
||||
// TODO(honghaiz): When we update to Android M SDK, use Network.getNetworkHandle().
|
||||
return Integer.parseInt(network.toString());
|
||||
}
|
||||
}
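A minimal sketch of driving this detector directly (normally NetworkMonitor owns it); `appContext` is an assumed application Context and construction must happen on the UI thread:

// Sketch only: appContext is an assumed android.content.Context; run on the UI thread.
NetworkMonitorAutoDetect detector = new NetworkMonitorAutoDetect(
    new NetworkMonitorAutoDetect.Observer() {
      @Override
      public void onConnectionTypeChanged(NetworkMonitorAutoDetect.ConnectionType newType) {
        Logging.d("App", "Default connection type: " + newType);
      }
      @Override
      public void onNetworkConnect(NetworkMonitorAutoDetect.NetworkInformation networkInfo) {}
      @Override
      public void onNetworkDisconnect(int networkHandle) {}
    },
    appContext);
NetworkMonitorAutoDetect.ConnectionType type =
    NetworkMonitorAutoDetect.getConnectionType(detector.getCurrentNetworkState());
detector.destroy(); // Releases network callbacks and unregisters the receiver.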
|
||||
@ -1 +0,0 @@
|
||||
magjed@webrtc.org
|
||||
@ -1,246 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Point;
|
||||
import android.opengl.GLES20;
|
||||
import android.opengl.Matrix;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Static helper functions for renderer implementations.
|
||||
*/
|
||||
public class RendererCommon {
|
||||
/** Interface for reporting rendering events. */
|
||||
public static interface RendererEvents {
|
||||
/**
|
||||
* Callback fired once first frame is rendered.
|
||||
*/
|
||||
public void onFirstFrameRendered();
|
||||
|
||||
/**
|
||||
* Callback fired when rendered frame resolution or rotation has changed.
|
||||
*/
|
||||
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
|
||||
}
|
||||
|
||||
/** Interface for rendering frames on an EGLSurface. */
|
||||
public static interface GlDrawer {
|
||||
/**
|
||||
* Functions for drawing frames with different sources. The rendering surface target is
|
||||
* implied by the current EGL context of the calling thread and requires no explicit argument.
|
||||
* The coordinates specify the viewport location on the surface target.
|
||||
*/
|
||||
void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
|
||||
void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
|
||||
void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
|
||||
|
||||
/**
|
||||
* Release all GL resources. This needs to be done manually, otherwise resources may leak.
|
||||
*/
|
||||
void release();
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper class for uploading YUV ByteBuffer frames to textures, handling stride > width. This
|
||||
* class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
|
||||
*/
|
||||
public static class YuvUploader {
|
||||
// Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
|
||||
// TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
|
||||
// that handles stride and compare performance with intermediate copy.
|
||||
private ByteBuffer copyBuffer;
|
||||
|
||||
/**
|
||||
* Upload |planes| into |outputYuvTextures|, taking stride into consideration.
|
||||
* |outputYuvTextures| must have been generated in advance.
|
||||
*/
|
||||
public void uploadYuvData(
|
||||
int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
|
||||
final int[] planeWidths = new int[] {width, width / 2, width / 2};
|
||||
final int[] planeHeights = new int[] {height, height / 2, height / 2};
|
||||
// Make a first pass to see if we need a temporary copy buffer.
|
||||
int copyCapacityNeeded = 0;
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
if (strides[i] > planeWidths[i]) {
|
||||
copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
|
||||
}
|
||||
}
|
||||
// Allocate copy buffer if necessary.
|
||||
if (copyCapacityNeeded > 0
|
||||
&& (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
|
||||
copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
|
||||
}
|
||||
// Upload each plane.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
|
||||
// GLES only accepts packed data, i.e. stride == planeWidth.
|
||||
final ByteBuffer packedByteBuffer;
|
||||
if (strides[i] == planeWidths[i]) {
|
||||
// Input is packed already.
|
||||
packedByteBuffer = planes[i];
|
||||
} else {
|
||||
VideoRenderer.nativeCopyPlane(
|
||||
planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
|
||||
packedByteBuffer = copyBuffer;
|
||||
}
|
||||
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
|
||||
planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
|
||||
}
|
||||
}
|
||||
}
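// Usage sketch for YuvUploader (illustrative only; textures must be generated in advance,
// e.g. with GlUtil.generateTexture(GLES20.GL_TEXTURE_2D) for each of the three planes):
//
//   int[] yuvTextures = new int[3];
//   for (int i = 0; i < 3; ++i) {
//     yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
//   }
//   yuvUploader.uploadYuvData(yuvTextures, width, height, strides, planes);
//
// Planes whose stride is larger than the plane width are repacked through |copyBuffer|;
// already-packed planes are uploaded directly.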
|
||||
|
||||
// Types of video scaling:
|
||||
// SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
|
||||
// maintaining the aspect ratio (black borders may be displayed).
|
||||
// SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
|
||||
// maintaining the aspect ratio. Some portion of the video frame may be
|
||||
// clipped.
|
||||
// SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
|
||||
// possible of the view while maintaining aspect ratio, under the constraint that at least
|
||||
// |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
|
||||
public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
|
||||
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
|
||||
// This limits excessive cropping when adjusting display size.
|
||||
private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
|
||||
public static final float[] identityMatrix() {
|
||||
return new float[] {
|
||||
1, 0, 0, 0,
|
||||
0, 1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
0, 0, 0, 1};
|
||||
}
|
||||
// Matrix with transform y' = 1 - y.
|
||||
public static final float[] verticalFlipMatrix() {
|
||||
return new float[] {
|
||||
1, 0, 0, 0,
|
||||
0, -1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
0, 1, 0, 1};
|
||||
}
|
||||
|
||||
// Matrix with transform x' = 1 - x.
|
||||
public static final float[] horizontalFlipMatrix() {
|
||||
return new float[] {
|
||||
-1, 0, 0, 0,
|
||||
0, 1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
1, 0, 0, 1};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
|
||||
* clockwise when rendered.
|
||||
*/
|
||||
public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
|
||||
final float[] rotationMatrix = new float[16];
|
||||
Matrix.setRotateM(rotationMatrix, 0, rotationDegree, 0, 0, 1);
|
||||
adjustOrigin(rotationMatrix);
|
||||
return multiplyMatrices(textureMatrix, rotationMatrix);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns new matrix with the result of a * b.
|
||||
*/
|
||||
public static float[] multiplyMatrices(float[] a, float[] b) {
|
||||
final float[] resultMatrix = new float[16];
|
||||
Matrix.multiplyMM(resultMatrix, 0, a, 0, b, 0);
|
||||
return resultMatrix;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns layout transformation matrix that applies an optional mirror effect and compensates
|
||||
* for video vs display aspect ratio.
|
||||
*/
|
||||
public static float[] getLayoutMatrix(
|
||||
boolean mirror, float videoAspectRatio, float displayAspectRatio) {
|
||||
float scaleX = 1;
|
||||
float scaleY = 1;
|
||||
// Scale X or Y dimension so that video and display size have same aspect ratio.
|
||||
if (displayAspectRatio > videoAspectRatio) {
|
||||
scaleY = videoAspectRatio / displayAspectRatio;
|
||||
} else {
|
||||
scaleX = displayAspectRatio / videoAspectRatio;
|
||||
}
|
||||
// Apply optional horizontal flip.
|
||||
if (mirror) {
|
||||
scaleX *= -1;
|
||||
}
|
||||
final float matrix[] = new float[16];
|
||||
Matrix.setIdentityM(matrix, 0);
|
||||
Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
|
||||
adjustOrigin(matrix);
|
||||
return matrix;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate display size based on scaling type, video aspect ratio, and maximum display size.
|
||||
*/
|
||||
public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
|
||||
int maxDisplayWidth, int maxDisplayHeight) {
|
||||
return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
|
||||
maxDisplayWidth, maxDisplayHeight);
|
||||
}
|
||||
|
||||
/**
|
||||
* Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
|
||||
* that are in the range 0 to 1.
|
||||
*/
|
||||
private static void adjustOrigin(float[] matrix) {
|
||||
// Note that OpenGL is using column-major order.
|
||||
// Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
|
||||
matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
|
||||
matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
|
||||
// Post translate with 0.5 to move coordinates to range [0, 1].
|
||||
matrix[12] += 0.5f;
|
||||
matrix[13] += 0.5f;
|
||||
}
|
||||
|
||||
/**
|
||||
* Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
|
||||
* that must remain visible.
|
||||
*/
|
||||
private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
|
||||
switch (scalingType) {
|
||||
case SCALE_ASPECT_FIT:
|
||||
return 1.0f;
|
||||
case SCALE_ASPECT_FILL:
|
||||
return 0.0f;
|
||||
case SCALE_ASPECT_BALANCED:
|
||||
return BALANCED_VISIBLE_FRACTION;
|
||||
default:
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate display size based on minimum fraction of the video that must remain visible,
|
||||
* video aspect ratio, and maximum display size.
|
||||
*/
|
||||
private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
|
||||
int maxDisplayWidth, int maxDisplayHeight) {
|
||||
// If there is no constraint on the amount of cropping, fill the allowed display area.
|
||||
if (minVisibleFraction == 0 || videoAspectRatio == 0) {
|
||||
return new Point(maxDisplayWidth, maxDisplayHeight);
|
||||
}
|
||||
// Each dimension is constrained on max display size and how much we are allowed to crop.
|
||||
final int width = Math.min(maxDisplayWidth,
|
||||
Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
|
||||
final int height = Math.min(maxDisplayHeight,
|
||||
Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
|
||||
return new Point(width, height);
|
||||
}
|
||||
}
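For orientation, a small hedged sketch combining the two public helpers above; the frame and view dimensions are made-up example values, and the snippet assumes android.graphics.Point is imported:

// Sketch only: a mirrored 1280x720 frame rendered into a 1080x1920 portrait view.
float videoAspectRatio = 1280 / (float) 720;
float[] layoutMatrix = RendererCommon.getLayoutMatrix(
    true /* mirror */, videoAspectRatio, 1080 / (float) 1920);
Point displaySize = RendererCommon.getDisplaySize(
    RendererCommon.ScalingType.SCALE_ASPECT_BALANCED, videoAspectRatio, 1080, 1920);
// SCALE_ASPECT_BALANCED keeps at least 56.25% of the frame visible, so displaySize here is
// (1080, 1080): height = min(1920, round(1080 / 0.5625f / (16 / 9f))) = 1080.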
|
||||
@ -1,499 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import android.os.Build;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.os.SystemClock;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.FloatBuffer;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
|
||||
* of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
|
||||
* the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
|
||||
* called in order to receive a new frame. Call stopListening() to stop receiving new frames. Call
|
||||
* dispose to release all resources once the texture frame is returned.
|
||||
* Note that there is a C++ counter part of this class that optionally can be used. It is used for
|
||||
* wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
|
||||
* when the webrtc::VideoFrame is no longer used.
|
||||
*/
|
||||
class SurfaceTextureHelper {
|
||||
private static final String TAG = "SurfaceTextureHelper";
|
||||
/**
|
||||
* Callback interface for being notified that a new texture frame is available. The calls will be
|
||||
* made on a dedicated thread with a bound EGLContext. The thread will be the same throughout the
|
||||
* lifetime of the SurfaceTextureHelper instance, but different from the thread calling the
|
||||
* SurfaceTextureHelper constructor. The callee is not allowed to make another EGLContext current
|
||||
* on the calling thread.
|
||||
*/
|
||||
public interface OnTextureFrameAvailableListener {
|
||||
abstract void onTextureFrameAvailable(
|
||||
int oesTextureId, float[] transformMatrix, long timestampNs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
|
||||
* thread and handler are created for handling the SurfaceTexture. May return null if EGL fails to
|
||||
* initialize a pixel buffer surface and make it current.
|
||||
*/
|
||||
public static SurfaceTextureHelper create(
|
||||
final String threadName, final EglBase.Context sharedContext) {
|
||||
final HandlerThread thread = new HandlerThread(threadName);
|
||||
thread.start();
|
||||
final Handler handler = new Handler(thread.getLooper());
|
||||
|
||||
// The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
|
||||
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
|
||||
// Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
|
||||
// is constructed on the |handler| thread.
|
||||
return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
|
||||
@Override
|
||||
public SurfaceTextureHelper call() {
|
||||
try {
|
||||
return new SurfaceTextureHelper(sharedContext, handler);
|
||||
} catch (RuntimeException e) {
|
||||
Logging.e(TAG, threadName + " create failure", e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// State for YUV conversion, instantiated on demand.
|
||||
private static class YuvConverter {
|
||||
private final EglBase eglBase;
|
||||
private final GlShader shader;
|
||||
private boolean released = false;
|
||||
|
||||
// Vertex coordinates in Normalized Device Coordinates, i.e.
|
||||
// (-1, -1) is bottom-left and (1, 1) is top-right.
|
||||
private static final FloatBuffer DEVICE_RECTANGLE =
|
||||
GlUtil.createFloatBuffer(new float[] {
|
||||
-1.0f, -1.0f, // Bottom left.
|
||||
1.0f, -1.0f, // Bottom right.
|
||||
-1.0f, 1.0f, // Top left.
|
||||
1.0f, 1.0f, // Top right.
|
||||
});
|
||||
|
||||
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
|
||||
private static final FloatBuffer TEXTURE_RECTANGLE =
|
||||
GlUtil.createFloatBuffer(new float[] {
|
||||
0.0f, 0.0f, // Bottom left.
|
||||
1.0f, 0.0f, // Bottom right.
|
||||
0.0f, 1.0f, // Top left.
|
||||
1.0f, 1.0f // Top right.
|
||||
});
|
||||
|
||||
private static final String VERTEX_SHADER =
|
||||
"varying vec2 interp_tc;\n"
|
||||
+ "attribute vec4 in_pos;\n"
|
||||
+ "attribute vec4 in_tc;\n"
|
||||
+ "\n"
|
||||
+ "uniform mat4 texMatrix;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
+ " gl_Position = in_pos;\n"
|
||||
+ " interp_tc = (texMatrix * in_tc).xy;\n"
|
||||
+ "}\n";
|
||||
|
||||
private static final String FRAGMENT_SHADER =
|
||||
"#extension GL_OES_EGL_image_external : require\n"
|
||||
+ "precision mediump float;\n"
|
||||
+ "varying vec2 interp_tc;\n"
|
||||
+ "\n"
|
||||
+ "uniform samplerExternalOES oesTex;\n"
|
||||
// Difference in texture coordinate corresponding to one
|
||||
// sub-pixel in the x direction.
|
||||
+ "uniform vec2 xUnit;\n"
|
||||
// Color conversion coefficients, including constant term
|
||||
+ "uniform vec4 coeffs;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
// Since the alpha read from the texture is always 1, this could
|
||||
// be written as a mat4 x vec4 multiply. However, that seems to
|
||||
// give a worse framerate, possibly because the additional
|
||||
// multiplies by 1.0 consume resources. TODO(nisse): Could also
|
||||
// try to do it as a vec3 x mat3x4, followed by an add in of a
|
||||
// constant vector.
|
||||
+ " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
|
||||
+ " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
|
||||
+ " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
|
||||
+ " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
|
||||
+ " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
|
||||
+ " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
|
||||
+ " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
|
||||
+ " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
|
||||
+ "}\n";
|
||||
|
||||
private int texMatrixLoc;
|
||||
private int xUnitLoc;
|
||||
private int coeffsLoc;
|
||||
|
||||
YuvConverter(EglBase.Context sharedContext) {
|
||||
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
|
||||
eglBase.createDummyPbufferSurface();
|
||||
eglBase.makeCurrent();
|
||||
|
||||
shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
|
||||
shader.useProgram();
|
||||
texMatrixLoc = shader.getUniformLocation("texMatrix");
|
||||
xUnitLoc = shader.getUniformLocation("xUnit");
|
||||
coeffsLoc = shader.getUniformLocation("coeffs");
|
||||
GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
|
||||
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
|
||||
// Initialize vertex shader attributes.
|
||||
shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
|
||||
// If the width is not a multiple of 4 pixels, the texture
|
||||
// will be scaled up slightly and clipped at the right border.
|
||||
shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
|
||||
eglBase.detachCurrent();
|
||||
}
|
||||
|
||||
synchronized void convert(ByteBuffer buf,
|
||||
int width, int height, int stride, int textureId, float [] transformMatrix) {
|
||||
if (released) {
|
||||
throw new IllegalStateException(
|
||||
"YuvConverter.convert called on released object");
|
||||
}
|
||||
|
||||
// We draw into a buffer laid out like
|
||||
//
|
||||
// +---------+
|
||||
// | |
|
||||
// | Y |
|
||||
// | |
|
||||
// | |
|
||||
// +----+----+
|
||||
// | U | V |
|
||||
// | | |
|
||||
// +----+----+
|
||||
//
|
||||
// In memory, we use the same stride for all of Y, U and V. The
|
||||
// U data starts at offset |height| * |stride| from the Y data,
|
||||
// and the V data starts at offset |stride/2| from the U
|
||||
// data, with rows of U and V data alternating.
|
||||
//
|
||||
// Now, it would have made sense to allocate a pixel buffer with
|
||||
// a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
|
||||
// EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
|
||||
// unsupported by devices. So do the following hack: Allocate an
|
||||
// RGBA buffer, of width |stride|/4. To render each of these
|
||||
// large pixels, sample the texture at 4 different x coordinates
|
||||
// and store the results in the four components.
|
||||
//
|
||||
// Since the V data needs to start on a boundary of such a
|
||||
// larger pixel, it is not sufficient that |stride| is even, it
|
||||
// has to be a multiple of 8 pixels.
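// For concreteness (values chosen only for illustration): with width = 640, height = 480 and
// stride = 640, the code below computes y_width = 160, uv_width = 80, uv_height = 240 and
// total_height = 720, so the caller must supply a buffer of at least stride * total_height =
// 460800 bytes, which exactly matches an RGBA pbuffer of stride / 4 = 160 "large" pixels per
// row and 720 rows (160 * 720 * 4 bytes).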
|
||||
|
||||
if (stride % 8 != 0) {
|
||||
throw new IllegalArgumentException(
|
||||
"Invalid stride, must be a multiple of 8");
|
||||
}
|
||||
if (stride < width) {
throw new IllegalArgumentException(
"Invalid stride, must be >= width");
}
|
||||
|
||||
int y_width = (width + 3) / 4;
int uv_width = (width + 7) / 8;
int uv_height = (height + 1) / 2;
int total_height = height + uv_height;
int size = stride * total_height;
|
||||
|
||||
if (buf.capacity() < size) {
|
||||
throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
|
||||
}
|
||||
// Produce a frame buffer starting at top-left corner, not
|
||||
// bottom-left.
|
||||
transformMatrix =
|
||||
RendererCommon.multiplyMatrices(transformMatrix,
|
||||
RendererCommon.verticalFlipMatrix());
|
||||
|
||||
// Create a new pbuffer surface with the correct size if needed.
|
||||
if (eglBase.hasSurface()) {
|
||||
if (eglBase.surfaceWidth() != stride/4 ||
|
||||
eglBase.surfaceHeight() != total_height){
|
||||
eglBase.releaseSurface();
|
||||
eglBase.createPbufferSurface(stride/4, total_height);
|
||||
}
|
||||
} else {
|
||||
eglBase.createPbufferSurface(stride/4, total_height);
|
||||
}
|
||||
|
||||
eglBase.makeCurrent();
|
||||
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
|
||||
GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
|
||||
|
||||
// Draw Y
|
||||
GLES20.glViewport(0, 0, y_width, height);
|
||||
// Matrix * (1;0;0;0) / width. Note that opengl uses column major order.
|
||||
GLES20.glUniform2f(xUnitLoc,
|
||||
transformMatrix[0] / width,
|
||||
transformMatrix[1] / width);
|
||||
// Y'UV444 to RGB888, see
|
||||
// https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
|
||||
// We use the ITU-R coefficients for U and V.
|
||||
GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
|
||||
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
||||
|
||||
// Draw U
|
||||
GLES20.glViewport(0, height, uv_width, uv_height);
|
||||
// Matrix * (1;0;0;0) / (width / 2). Note that opengl uses column major order.
|
||||
GLES20.glUniform2f(xUnitLoc,
|
||||
2.0f * transformMatrix[0] / width,
|
||||
2.0f * transformMatrix[1] / width);
|
||||
GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
|
||||
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
||||
|
||||
// Draw V
|
||||
GLES20.glViewport(stride/8, height, uv_width, uv_height);
|
||||
GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
|
||||
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
||||
|
||||
GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
|
||||
GLES20.GL_UNSIGNED_BYTE, buf);
|
||||
|
||||
GlUtil.checkNoGLES2Error("YuvConverter.convert");
|
||||
|
||||
// Unbind texture. Reportedly needed on some devices to get
|
||||
// the texture updated from the camera.
|
||||
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
|
||||
eglBase.detachCurrent();
|
||||
}
|
||||
|
||||
synchronized void release() {
|
||||
released = true;
|
||||
eglBase.makeCurrent();
|
||||
shader.release();
|
||||
eglBase.release();
|
||||
}
|
||||
}
|
||||
|
||||
private final Handler handler;
|
||||
private final EglBase eglBase;
|
||||
private final SurfaceTexture surfaceTexture;
|
||||
private final int oesTextureId;
|
||||
private YuvConverter yuvConverter;
|
||||
|
||||
// These variables are only accessed from the |handler| thread.
|
||||
private OnTextureFrameAvailableListener listener;
|
||||
// The possible states of this class.
|
||||
private boolean hasPendingTexture = false;
|
||||
private volatile boolean isTextureInUse = false;
|
||||
private boolean isQuitting = false;
|
||||
// |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
|
||||
// setListener() is not allowed to be called again before stopListening(), so this is thread safe.
|
||||
private OnTextureFrameAvailableListener pendingListener;
|
||||
final Runnable setListenerRunnable = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
Logging.d(TAG, "Setting listener to " + pendingListener);
|
||||
listener = pendingListener;
|
||||
pendingListener = null;
|
||||
// May have a pending frame from the previous capture session - drop it.
if (hasPendingTexture) {
  // Calling updateTexImage() is necessary in order to receive new frames.
  updateTexImage();
  hasPendingTexture = false;
}
|
||||
}
|
||||
};
|
||||
|
||||
private SurfaceTextureHelper(EglBase.Context sharedContext, Handler handler) {
|
||||
if (handler.getLooper().getThread() != Thread.currentThread()) {
|
||||
throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
|
||||
}
|
||||
this.handler = handler;
|
||||
|
||||
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
try {
|
||||
// Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
|
||||
eglBase.createDummyPbufferSurface();
|
||||
eglBase.makeCurrent();
|
||||
} catch (RuntimeException e) {
|
||||
// Clean up before rethrowing the exception.
|
||||
eglBase.release();
|
||||
handler.getLooper().quit();
|
||||
throw e;
|
||||
}
|
||||
|
||||
oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
|
||||
surfaceTexture = new SurfaceTexture(oesTextureId);
|
||||
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
|
||||
@Override
|
||||
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
|
||||
hasPendingTexture = true;
|
||||
tryDeliverTextureFrame();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private YuvConverter getYuvConverter() {
|
||||
// yuvConverter is assigned once; if it already exists, return it without taking the lock.
|
||||
if (yuvConverter != null)
|
||||
return yuvConverter;
|
||||
|
||||
synchronized(this) {
|
||||
if (yuvConverter == null)
|
||||
yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
|
||||
return yuvConverter;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
 * Start streaming textures to the given |listener|. If you need to change the listener, you
 * must call stopListening() first.
|
||||
*/
|
||||
public void startListening(final OnTextureFrameAvailableListener listener) {
|
||||
if (this.listener != null || this.pendingListener != null) {
|
||||
throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
|
||||
}
|
||||
this.pendingListener = listener;
|
||||
handler.post(setListenerRunnable);
|
||||
}
|
||||
|
||||
/**
 * Stop listening. The listener set in startListening() is guaranteed not to receive any more
 * onTextureFrameAvailable() callbacks after this function returns.
 */
|
||||
public void stopListening() {
|
||||
Logging.d(TAG, "stopListening()");
|
||||
handler.removeCallbacks(setListenerRunnable);
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
listener = null;
|
||||
pendingListener = null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
|
||||
* producer such as a camera or decoder.
|
||||
*/
|
||||
public SurfaceTexture getSurfaceTexture() {
|
||||
return surfaceTexture;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the handler that calls onTextureFrameAvailable(). This handler is valid until
|
||||
* dispose() is called.
|
||||
*/
|
||||
public Handler getHandler() {
|
||||
return handler;
|
||||
}
|
||||
|
||||
/**
|
||||
* Call this function to signal that you are done with the frame received in
|
||||
* onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
|
||||
* this function in order to receive a new frame.
|
||||
*/
|
||||
public void returnTextureFrame() {
|
||||
handler.post(new Runnable() {
|
||||
@Override public void run() {
|
||||
isTextureInUse = false;
|
||||
if (isQuitting) {
|
||||
release();
|
||||
} else {
|
||||
tryDeliverTextureFrame();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public boolean isTextureInUse() {
|
||||
return isTextureInUse;
|
||||
}
|
||||
|
||||
/**
|
||||
* Call dispose() to stop receiving frames. OpenGL resources are released and the handler is
|
||||
* stopped when the texture frame has been returned by a call to returnTextureFrame(). You are
|
||||
* guaranteed to not receive any more onTextureFrameAvailable() after this function returns.
|
||||
*/
|
||||
public void dispose() {
|
||||
Logging.d(TAG, "dispose()");
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
isQuitting = true;
|
||||
if (!isTextureInUse) {
|
||||
release();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void textureToYUV(ByteBuffer buf,
|
||||
int width, int height, int stride, int textureId, float [] transformMatrix) {
|
||||
if (textureId != oesTextureId)
|
||||
throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
|
||||
|
||||
getYuvConverter().convert(buf, width, height, stride, textureId, transformMatrix);
|
||||
}
|
||||
|
||||
private void updateTexImage() {
|
||||
// SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
|
||||
// as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
|
||||
// See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
|
||||
synchronized (EglBase.lock) {
|
||||
surfaceTexture.updateTexImage();
|
||||
}
|
||||
}
|
||||
|
||||
private void tryDeliverTextureFrame() {
|
||||
if (handler.getLooper().getThread() != Thread.currentThread()) {
|
||||
throw new IllegalStateException("Wrong thread.");
|
||||
}
|
||||
if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
|
||||
return;
|
||||
}
|
||||
isTextureInUse = true;
|
||||
hasPendingTexture = false;
|
||||
|
||||
updateTexImage();
|
||||
|
||||
final float[] transformMatrix = new float[16];
|
||||
surfaceTexture.getTransformMatrix(transformMatrix);
|
||||
final long timestampNs = (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
|
||||
? surfaceTexture.getTimestamp()
|
||||
: TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
|
||||
listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
|
||||
}
|
||||
|
||||
private void release() {
|
||||
if (handler.getLooper().getThread() != Thread.currentThread()) {
|
||||
throw new IllegalStateException("Wrong thread.");
|
||||
}
|
||||
if (isTextureInUse || !isQuitting) {
|
||||
throw new IllegalStateException("Unexpected release.");
|
||||
}
|
||||
synchronized (this) {
|
||||
if (yuvConverter != null)
|
||||
yuvConverter.release();
|
||||
}
|
||||
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
|
||||
surfaceTexture.release();
|
||||
eglBase.release();
|
||||
handler.getLooper().quit();
|
||||
}
|
||||
}
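Below is a minimal sketch of the texture-frame lifecycle documented above. It is illustrative
only: the SurfaceTextureHelper instance passed in as |helper| is assumed to have been created
elsewhere on its own handler thread, and the camera wiring is only shown as a comment.

  // Hypothetical consumer of SurfaceTextureHelper.
  void startConsuming(final SurfaceTextureHelper helper) {
    helper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
      @Override
      public void onTextureFrameAvailable(
          int oesTextureId, float[] transformMatrix, long timestampNs) {
        // Use the OES texture here (draw it, or convert it with helper.textureToYUV()).
        // Only one frame can be in flight at a time, so return it to receive the next one.
        helper.returnTextureFrame();
      }
    });
    // The SurfaceTexture is handed to a producer such as a camera:
    //   camera.setPreviewTexture(helper.getSurfaceTexture());
    // When done: helper.stopListening() and then helper.dispose().
  }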
|
||||
@ -1,565 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.content.res.Resources.NotFoundException;
|
||||
import android.graphics.Point;
|
||||
import android.opengl.GLES20;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.util.AttributeSet;
|
||||
import android.view.SurfaceHolder;
|
||||
import android.view.SurfaceView;
|
||||
|
||||
import org.webrtc.Logging;
|
||||
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
||||
import javax.microedition.khronos.egl.EGLContext;
|
||||
|
||||
/**
|
||||
 * Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
 * renderFrame() is asynchronous to avoid blocking the calling thread.
 * This class is thread safe and handles access from potentially four different threads:
 * Interaction from the main app in init, release, setMirror, and setScalingType.
 * Interaction from C++ rtc::VideoSinkInterface in renderFrame.
 * Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
 * Interaction with the layout framework in onMeasure and onSizeChanged.
 */
|
||||
public class SurfaceViewRenderer extends SurfaceView
|
||||
implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
|
||||
private static final String TAG = "SurfaceViewRenderer";
|
||||
|
||||
// Dedicated render thread.
|
||||
private HandlerThread renderThread;
|
||||
// |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
|
||||
// on |handlerLock|.
|
||||
private final Object handlerLock = new Object();
|
||||
private Handler renderThreadHandler;
|
||||
|
||||
// EGL and GL resources for drawing YUV/OES textures. After initialization, these are only
// accessed from the render thread.
|
||||
private EglBase eglBase;
|
||||
private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
|
||||
private RendererCommon.GlDrawer drawer;
|
||||
// Texture ids for YUV frames. Allocated on first arrival of a YUV frame.
|
||||
private int[] yuvTextures = null;
|
||||
|
||||
// Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
|
||||
private final Object frameLock = new Object();
|
||||
private VideoRenderer.I420Frame pendingFrame;
|
||||
|
||||
// These variables are synchronized on |layoutLock|.
|
||||
private final Object layoutLock = new Object();
|
||||
// These dimension values are used to keep track of the state in these functions: onMeasure(),
|
||||
// onLayout(), and surfaceChanged(). A new layout is triggered with requestLayout(). This happens
|
||||
// internally when the incoming frame size changes. requestLayout() can also be triggered
|
||||
// externally. The layout change is a two pass process: first onMeasure() is called in a top-down
|
||||
// traversal of the View tree, followed by an onLayout() pass that is also top-down. During the
|
||||
// onLayout() pass, each parent is responsible for positioning its children using the sizes
|
||||
// computed in the measure pass.
|
||||
// |desiredLayoutSize| is the layout size we have requested in onMeasure() and are waiting to
// take effect.
|
||||
private Point desiredLayoutSize = new Point();
|
||||
// |layoutSize|/|surfaceSize| is the actual current layout/surface size. They are updated in
|
||||
// onLayout() and surfaceChanged() respectively.
|
||||
private final Point layoutSize = new Point();
|
||||
// TODO(magjed): Enable hardware scaler with SurfaceHolder.setFixedSize(). This will decouple
|
||||
// layout and surface size.
|
||||
private final Point surfaceSize = new Point();
|
||||
// |isSurfaceCreated| keeps track of the current status in surfaceCreated()/surfaceDestroyed().
|
||||
private boolean isSurfaceCreated;
|
||||
// Last rendered frame dimensions, or 0 if no frame has been rendered yet.
|
||||
private int frameWidth;
|
||||
private int frameHeight;
|
||||
private int frameRotation;
|
||||
// |scalingType| determines how the video will fill the allowed layout area in onMeasure().
|
||||
private RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_BALANCED;
|
||||
// If true, mirrors the video stream horizontally.
|
||||
private boolean mirror;
|
||||
// Callback for reporting renderer events.
|
||||
private RendererCommon.RendererEvents rendererEvents;
|
||||
|
||||
// These variables are synchronized on |statisticsLock|.
|
||||
private final Object statisticsLock = new Object();
|
||||
// Total number of video frames received in renderFrame() call.
|
||||
private int framesReceived;
|
||||
// Number of video frames dropped by renderFrame() because previous frame has not been rendered
|
||||
// yet.
|
||||
private int framesDropped;
|
||||
// Number of rendered video frames.
|
||||
private int framesRendered;
|
||||
// Time in ns when the first video frame was rendered.
|
||||
private long firstFrameTimeNs;
|
||||
// Time in ns spent in renderFrameOnRenderThread() function.
|
||||
private long renderTimeNs;
|
||||
|
||||
// Runnable for posting frames to render thread.
|
||||
private final Runnable renderFrameRunnable = new Runnable() {
|
||||
@Override public void run() {
|
||||
renderFrameOnRenderThread();
|
||||
}
|
||||
};
|
||||
// Runnable for clearing Surface to black.
|
||||
private final Runnable makeBlackRunnable = new Runnable() {
|
||||
@Override public void run() {
|
||||
makeBlack();
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Standard View constructor. In order to render something, you must first call init().
|
||||
*/
|
||||
public SurfaceViewRenderer(Context context) {
|
||||
super(context);
|
||||
getHolder().addCallback(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Standard View constructor. In order to render something, you must first call init().
|
||||
*/
|
||||
public SurfaceViewRenderer(Context context, AttributeSet attrs) {
|
||||
super(context, attrs);
|
||||
getHolder().addCallback(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
|
||||
* reinitialize the renderer after a previous init()/release() cycle.
|
||||
*/
|
||||
public void init(
|
||||
EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
|
||||
init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
|
||||
* for drawing frames on the EGLSurface. This class is responsible for calling release() on
|
||||
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
|
||||
* init()/release() cycle.
|
||||
*/
|
||||
public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
|
||||
int[] configAttributes, RendererCommon.GlDrawer drawer) {
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler != null) {
|
||||
throw new IllegalStateException(getResourceName() + "Already initialized");
|
||||
}
|
||||
Logging.d(TAG, getResourceName() + "Initializing.");
|
||||
this.rendererEvents = rendererEvents;
|
||||
this.drawer = drawer;
|
||||
renderThread = new HandlerThread(TAG);
|
||||
renderThread.start();
|
||||
eglBase = EglBase.create(sharedContext, configAttributes);
|
||||
renderThreadHandler = new Handler(renderThread.getLooper());
|
||||
}
|
||||
tryCreateEglSurface();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create and make an EGLSurface current if both init() and surfaceCreated() have been called.
|
||||
*/
|
||||
public void tryCreateEglSurface() {
|
||||
// |renderThreadHandler| is only created after |eglBase| is created in init(), so the
|
||||
// following code will only execute if eglBase != null.
|
||||
runOnRenderThread(new Runnable() {
|
||||
@Override public void run() {
|
||||
synchronized (layoutLock) {
|
||||
if (isSurfaceCreated && !eglBase.hasSurface()) {
|
||||
eglBase.createSurface(getHolder().getSurface());
|
||||
eglBase.makeCurrent();
|
||||
// Necessary for YUV frames with odd width.
|
||||
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Block until any pending frame is returned and all GL resources released, even if an interrupt
|
||||
* occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
|
||||
* should be called before the Activity is destroyed, while the EGLContext is still valid. If you
|
||||
* don't call this function, the GL resources might leak.
|
||||
*/
|
||||
public void release() {
|
||||
final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler == null) {
|
||||
Logging.d(TAG, getResourceName() + "Already released");
|
||||
return;
|
||||
}
|
||||
// Release EGL and GL resources on render thread.
|
||||
// TODO(magjed): This might not be necessary - all OpenGL resources are automatically deleted
|
||||
// when the EGL context is lost. It might be dangerous to delete them manually in
|
||||
// Activity.onDestroy().
|
||||
renderThreadHandler.postAtFrontOfQueue(new Runnable() {
|
||||
@Override public void run() {
|
||||
drawer.release();
|
||||
drawer = null;
|
||||
if (yuvTextures != null) {
|
||||
GLES20.glDeleteTextures(3, yuvTextures, 0);
|
||||
yuvTextures = null;
|
||||
}
|
||||
// Clear last rendered image to black.
|
||||
makeBlack();
|
||||
eglBase.release();
|
||||
eglBase = null;
|
||||
eglCleanupBarrier.countDown();
|
||||
}
|
||||
});
|
||||
// Don't accept any more frames or messages to the render thread.
|
||||
renderThreadHandler = null;
|
||||
}
|
||||
// Make sure the EGL/GL cleanup posted above is executed.
|
||||
ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
|
||||
renderThread.quit();
|
||||
synchronized (frameLock) {
|
||||
if (pendingFrame != null) {
|
||||
VideoRenderer.renderFrameDone(pendingFrame);
|
||||
pendingFrame = null;
|
||||
}
|
||||
}
|
||||
// The |renderThread| cleanup is not safe to cancel and we need to wait until it's done.
|
||||
ThreadUtils.joinUninterruptibly(renderThread);
|
||||
renderThread = null;
|
||||
// Reset statistics and event reporting.
|
||||
synchronized (layoutLock) {
|
||||
frameWidth = 0;
|
||||
frameHeight = 0;
|
||||
frameRotation = 0;
|
||||
rendererEvents = null;
|
||||
}
|
||||
resetStatistics();
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset statistics. This will reset the logged statistics in logStatistics(), and
|
||||
* RendererEvents.onFirstFrameRendered() will be called for the next frame.
|
||||
*/
|
||||
public void resetStatistics() {
|
||||
synchronized (statisticsLock) {
|
||||
framesReceived = 0;
|
||||
framesDropped = 0;
|
||||
framesRendered = 0;
|
||||
firstFrameTimeNs = 0;
|
||||
renderTimeNs = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set if the video stream should be mirrored or not.
|
||||
*/
|
||||
public void setMirror(final boolean mirror) {
|
||||
synchronized (layoutLock) {
|
||||
this.mirror = mirror;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set how the video will fill the allowed layout area.
|
||||
*/
|
||||
public void setScalingType(RendererCommon.ScalingType scalingType) {
|
||||
synchronized (layoutLock) {
|
||||
this.scalingType = scalingType;
|
||||
}
|
||||
}
|
||||
|
||||
// VideoRenderer.Callbacks interface.
|
||||
@Override
|
||||
public void renderFrame(VideoRenderer.I420Frame frame) {
|
||||
synchronized (statisticsLock) {
|
||||
++framesReceived;
|
||||
}
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler == null) {
|
||||
Logging.d(TAG, getResourceName()
|
||||
+ "Dropping frame - Not initialized or already released.");
|
||||
VideoRenderer.renderFrameDone(frame);
|
||||
return;
|
||||
}
|
||||
synchronized (frameLock) {
|
||||
if (pendingFrame != null) {
|
||||
// Drop old frame.
|
||||
synchronized (statisticsLock) {
|
||||
++framesDropped;
|
||||
}
|
||||
VideoRenderer.renderFrameDone(pendingFrame);
|
||||
}
|
||||
pendingFrame = frame;
|
||||
updateFrameDimensionsAndReportEvents(frame);
|
||||
renderThreadHandler.post(renderFrameRunnable);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Returns desired layout size given current measure specification and video aspect ratio.
|
||||
private Point getDesiredLayoutSize(int widthSpec, int heightSpec) {
|
||||
synchronized (layoutLock) {
|
||||
final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec);
|
||||
final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec);
|
||||
final Point size =
|
||||
RendererCommon.getDisplaySize(scalingType, frameAspectRatio(), maxWidth, maxHeight);
|
||||
if (MeasureSpec.getMode(widthSpec) == MeasureSpec.EXACTLY) {
|
||||
size.x = maxWidth;
|
||||
}
|
||||
if (MeasureSpec.getMode(heightSpec) == MeasureSpec.EXACTLY) {
|
||||
size.y = maxHeight;
|
||||
}
|
||||
return size;
|
||||
}
|
||||
}
|
||||
|
||||
// View layout interface.
|
||||
@Override
|
||||
protected void onMeasure(int widthSpec, int heightSpec) {
|
||||
synchronized (layoutLock) {
|
||||
if (frameWidth == 0 || frameHeight == 0) {
|
||||
super.onMeasure(widthSpec, heightSpec);
|
||||
return;
|
||||
}
|
||||
desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
|
||||
if (desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight()) {
|
||||
// Clear the surface ASAP before the layout change to avoid stretched video and other
// render artifacts. Don't wait for it to finish because the IO thread should never be
// blocked, so it's a best-effort attempt.
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler != null) {
|
||||
renderThreadHandler.postAtFrontOfQueue(makeBlackRunnable);
|
||||
}
|
||||
}
|
||||
}
|
||||
setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
|
||||
synchronized (layoutLock) {
|
||||
layoutSize.x = right - left;
|
||||
layoutSize.y = bottom - top;
|
||||
}
|
||||
// Might have a pending frame waiting for a layout of correct size.
|
||||
runOnRenderThread(renderFrameRunnable);
|
||||
}
|
||||
|
||||
// SurfaceHolder.Callback interface.
|
||||
@Override
|
||||
public void surfaceCreated(final SurfaceHolder holder) {
|
||||
Logging.d(TAG, getResourceName() + "Surface created.");
|
||||
synchronized (layoutLock) {
|
||||
isSurfaceCreated = true;
|
||||
}
|
||||
tryCreateEglSurface();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void surfaceDestroyed(SurfaceHolder holder) {
|
||||
Logging.d(TAG, getResourceName() + "Surface destroyed.");
|
||||
synchronized (layoutLock) {
|
||||
isSurfaceCreated = false;
|
||||
surfaceSize.x = 0;
|
||||
surfaceSize.y = 0;
|
||||
}
|
||||
runOnRenderThread(new Runnable() {
|
||||
@Override public void run() {
|
||||
eglBase.releaseSurface();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
|
||||
Logging.d(TAG, getResourceName() + "Surface changed: " + width + "x" + height);
|
||||
synchronized (layoutLock) {
|
||||
surfaceSize.x = width;
|
||||
surfaceSize.y = height;
|
||||
}
|
||||
// Might have a pending frame waiting for a surface of correct size.
|
||||
runOnRenderThread(renderFrameRunnable);
|
||||
}
|
||||
|
||||
/**
|
||||
* Private helper function to post tasks safely.
|
||||
*/
|
||||
private void runOnRenderThread(Runnable runnable) {
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler != null) {
|
||||
renderThreadHandler.post(runnable);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private String getResourceName() {
|
||||
try {
|
||||
return getResources().getResourceEntryName(getId()) + ": ";
|
||||
} catch (NotFoundException e) {
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
private void makeBlack() {
|
||||
if (Thread.currentThread() != renderThread) {
|
||||
throw new IllegalStateException(getResourceName() + "Wrong thread.");
|
||||
}
|
||||
if (eglBase != null && eglBase.hasSurface()) {
|
||||
GLES20.glClearColor(0, 0, 0, 0);
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
eglBase.swapBuffers();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests new layout if necessary. Returns true if layout and surface size are consistent.
|
||||
*/
|
||||
private boolean checkConsistentLayout() {
|
||||
if (Thread.currentThread() != renderThread) {
|
||||
throw new IllegalStateException(getResourceName() + "Wrong thread.");
|
||||
}
|
||||
synchronized (layoutLock) {
|
||||
// Return false while we are in the middle of a layout change.
|
||||
return layoutSize.equals(desiredLayoutSize) && surfaceSize.equals(layoutSize);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders and releases |pendingFrame|.
|
||||
*/
|
||||
private void renderFrameOnRenderThread() {
|
||||
if (Thread.currentThread() != renderThread) {
|
||||
throw new IllegalStateException(getResourceName() + "Wrong thread.");
|
||||
}
|
||||
// Fetch and render |pendingFrame|.
|
||||
final VideoRenderer.I420Frame frame;
|
||||
synchronized (frameLock) {
|
||||
if (pendingFrame == null) {
|
||||
return;
|
||||
}
|
||||
frame = pendingFrame;
|
||||
pendingFrame = null;
|
||||
}
|
||||
if (eglBase == null || !eglBase.hasSurface()) {
|
||||
Logging.d(TAG, getResourceName() + "No surface to draw on");
|
||||
VideoRenderer.renderFrameDone(frame);
|
||||
return;
|
||||
}
|
||||
if (!checkConsistentLayout()) {
|
||||
// Output intermediate black frames while the layout is updated.
|
||||
makeBlack();
|
||||
VideoRenderer.renderFrameDone(frame);
|
||||
return;
|
||||
}
|
||||
// After a surface size change, the EGLSurface might still have a buffer of the old size in the
|
||||
// pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet
|
||||
// changed. Such a buffer will be rendered incorrectly, so flush it with a black frame.
|
||||
synchronized (layoutLock) {
|
||||
if (eglBase.surfaceWidth() != surfaceSize.x || eglBase.surfaceHeight() != surfaceSize.y) {
|
||||
makeBlack();
|
||||
}
|
||||
}
|
||||
|
||||
final long startTimeNs = System.nanoTime();
|
||||
final float[] texMatrix;
|
||||
synchronized (layoutLock) {
|
||||
final float[] rotatedSamplingMatrix =
|
||||
RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
|
||||
final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
|
||||
mirror, frameAspectRatio(), (float) layoutSize.x / layoutSize.y);
|
||||
texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
|
||||
}
|
||||
|
||||
// TODO(magjed): glClear() shouldn't be necessary since every pixel is covered anyway, but it's
|
||||
// a workaround for bug 5147. Performance will be slightly worse.
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
if (frame.yuvFrame) {
|
||||
// Make sure YUV textures are allocated.
|
||||
if (yuvTextures == null) {
|
||||
yuvTextures = new int[3];
|
||||
for (int i = 0; i < 3; i++) {
|
||||
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
|
||||
}
|
||||
}
|
||||
yuvUploader.uploadYuvData(
|
||||
yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
|
||||
drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
|
||||
0, 0, surfaceSize.x, surfaceSize.y);
|
||||
} else {
|
||||
drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
|
||||
0, 0, surfaceSize.x, surfaceSize.y);
|
||||
}
|
||||
|
||||
eglBase.swapBuffers();
|
||||
VideoRenderer.renderFrameDone(frame);
|
||||
synchronized (statisticsLock) {
|
||||
if (framesRendered == 0) {
|
||||
firstFrameTimeNs = startTimeNs;
|
||||
synchronized (layoutLock) {
|
||||
Logging.d(TAG, getResourceName() + "Reporting first rendered frame.");
|
||||
if (rendererEvents != null) {
|
||||
rendererEvents.onFirstFrameRendered();
|
||||
}
|
||||
}
|
||||
}
|
||||
++framesRendered;
|
||||
renderTimeNs += (System.nanoTime() - startTimeNs);
|
||||
if (framesRendered % 300 == 0) {
|
||||
logStatistics();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Return current frame aspect ratio, taking rotation into account.
|
||||
private float frameAspectRatio() {
|
||||
synchronized (layoutLock) {
|
||||
if (frameWidth == 0 || frameHeight == 0) {
|
||||
return 0.0f;
|
||||
}
|
||||
return (frameRotation % 180 == 0) ? (float) frameWidth / frameHeight
|
||||
: (float) frameHeight / frameWidth;
|
||||
}
|
||||
}
|
||||
|
||||
// Update frame dimensions and report any changes to |rendererEvents|.
|
||||
private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
|
||||
synchronized (layoutLock) {
|
||||
if (frameWidth != frame.width || frameHeight != frame.height
|
||||
|| frameRotation != frame.rotationDegree) {
|
||||
Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
|
||||
+ frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
|
||||
if (rendererEvents != null) {
|
||||
rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
|
||||
}
|
||||
frameWidth = frame.width;
|
||||
frameHeight = frame.height;
|
||||
frameRotation = frame.rotationDegree;
|
||||
post(new Runnable() {
|
||||
@Override public void run() {
|
||||
requestLayout();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void logStatistics() {
|
||||
synchronized (statisticsLock) {
|
||||
Logging.d(TAG, getResourceName() + "Frames received: "
|
||||
+ framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
|
||||
if (framesReceived > 0 && framesRendered > 0) {
|
||||
final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
|
||||
Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
|
||||
" ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
|
||||
Logging.d(TAG, getResourceName() + "Average render time: "
|
||||
+ (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
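A minimal sketch of the init()/release() cycle described in the comments above. It is
illustrative only: the EglBase instance and the VideoTrack/VideoRenderer plumbing (rootEglBase,
remoteVideoTrack, the view id) are assumed to come from the application's own PeerConnection
setup and are not defined here.

  // Hypothetical Activity-side wiring for SurfaceViewRenderer.
  SurfaceViewRenderer renderer = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
  renderer.init(rootEglBase.getEglBaseContext(), null /* rendererEvents */);
  renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);
  renderer.setMirror(false);
  // Attach it to a track as a VideoRenderer.Callbacks implementation, e.g.:
  //   remoteVideoTrack.addRenderer(new VideoRenderer(renderer));
  // ...
  // Before the Activity is destroyed, while the EGL context is still valid:
  renderer.release();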
|
||||
@ -1,672 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
|
||||
|
||||
import android.content.Context;
|
||||
import android.os.Handler;
|
||||
import android.os.SystemClock;
|
||||
import android.view.Surface;
|
||||
import android.view.WindowManager;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
// Android specific implementation of VideoCapturer.
|
||||
// An instance of this class can be created by an application using
|
||||
// VideoCapturerAndroid.create();
|
||||
// This class extends VideoCapturer with a method to easily switch between the
|
||||
// front and back camera. It also provides methods for enumerating valid device
|
||||
// names.
|
||||
//
|
||||
// Threading notes: this class is called from C++ code, Android Camera callbacks, and possibly
|
||||
// arbitrary Java threads. All public entry points are thread safe, and delegate the work to the
|
||||
// camera thread. The internal *OnCameraThread() methods must check |camera| for null to check if
|
||||
// the camera has been stopped.
|
||||
// TODO(magjed): This class name is now confusing - rename to Camera1VideoCapturer.
|
||||
@SuppressWarnings("deprecation")
|
||||
public class VideoCapturerAndroid implements
|
||||
CameraVideoCapturer,
|
||||
android.hardware.Camera.PreviewCallback,
|
||||
SurfaceTextureHelper.OnTextureFrameAvailableListener {
|
||||
private final static String TAG = "VideoCapturerAndroid";
|
||||
private static final int CAMERA_STOP_TIMEOUT_MS = 7000;
|
||||
|
||||
private android.hardware.Camera camera; // Only non-null while capturing.
|
||||
private final Object handlerLock = new Object();
|
||||
// |cameraThreadHandler| must be synchronized on |handlerLock| when not on the camera thread,
|
||||
// or when modifying the reference. Use maybePostOnCameraThread() instead of posting directly to
|
||||
// the handler - this way all callbacks with a specified token can be removed at once.
|
||||
private Handler cameraThreadHandler;
|
||||
private Context applicationContext;
|
||||
// Synchronization lock for |id|.
|
||||
private final Object cameraIdLock = new Object();
|
||||
private int id;
|
||||
private android.hardware.Camera.CameraInfo info;
|
||||
private CameraStatistics cameraStatistics;
|
||||
// Remember the requested format in case we want to switch cameras.
|
||||
private int requestedWidth;
|
||||
private int requestedHeight;
|
||||
private int requestedFramerate;
|
||||
// The capture format will be the closest supported format to the requested format.
|
||||
private CaptureFormat captureFormat;
|
||||
private final Object pendingCameraSwitchLock = new Object();
|
||||
private volatile boolean pendingCameraSwitch;
|
||||
private CapturerObserver frameObserver = null;
|
||||
private final CameraEventsHandler eventsHandler;
|
||||
private boolean firstFrameReported;
|
||||
// Arbitrary queue depth. Higher number means more memory allocated & held,
|
||||
// lower number means more sensitivity to processing time in the client (and
|
||||
// potentially stalling the capturer if it runs out of buffers to write to).
|
||||
private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
|
||||
private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
|
||||
private final boolean isCapturingToTexture;
|
||||
private SurfaceTextureHelper surfaceHelper;
|
||||
private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
|
||||
private final static int OPEN_CAMERA_DELAY_MS = 500;
|
||||
private int openCameraAttempts;
|
||||
|
||||
// Camera error callback.
|
||||
private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
|
||||
new android.hardware.Camera.ErrorCallback() {
|
||||
@Override
|
||||
public void onError(int error, android.hardware.Camera camera) {
|
||||
String errorMessage;
|
||||
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
|
||||
errorMessage = "Camera server died!";
|
||||
} else {
|
||||
errorMessage = "Camera error: " + error;
|
||||
}
|
||||
Logging.e(TAG, errorMessage);
|
||||
if (eventsHandler != null) {
|
||||
eventsHandler.onCameraError(errorMessage);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static VideoCapturerAndroid create(String name,
|
||||
CameraEventsHandler eventsHandler) {
|
||||
return VideoCapturerAndroid.create(name, eventsHandler, false /* captureToTexture */);
|
||||
}
|
||||
|
||||
// Use ctor directly instead.
|
||||
@Deprecated
|
||||
public static VideoCapturerAndroid create(String name,
|
||||
CameraEventsHandler eventsHandler, boolean captureToTexture) {
|
||||
try {
|
||||
return new VideoCapturerAndroid(name, eventsHandler, captureToTexture);
|
||||
} catch (RuntimeException e) {
|
||||
Logging.e(TAG, "Couldn't create camera.", e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public void printStackTrace() {
|
||||
Thread cameraThread = null;
|
||||
synchronized (handlerLock) {
|
||||
if (cameraThreadHandler != null) {
|
||||
cameraThread = cameraThreadHandler.getLooper().getThread();
|
||||
}
|
||||
}
|
||||
if (cameraThread != null) {
|
||||
StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
|
||||
if (cameraStackTraces.length > 0) {
|
||||
Logging.d(TAG, "VideoCapturerAndroid stacks trace:");
|
||||
for (StackTraceElement stackTrace : cameraStackTraces) {
|
||||
Logging.d(TAG, stackTrace.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Switch camera to the next valid camera id. This can only be called while
|
||||
// the camera is running.
|
||||
@Override
|
||||
public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
|
||||
if (android.hardware.Camera.getNumberOfCameras() < 2) {
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError("No camera to switch to.");
|
||||
}
|
||||
return;
|
||||
}
|
||||
synchronized (pendingCameraSwitchLock) {
|
||||
if (pendingCameraSwitch) {
|
||||
// Do not handle multiple camera switch requests, to avoid blocking the
// camera thread by handling too many switch requests from a queue.
|
||||
Logging.w(TAG, "Ignoring camera switch request.");
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError("Pending camera switch already in progress.");
|
||||
}
|
||||
return;
|
||||
}
|
||||
pendingCameraSwitch = true;
|
||||
}
|
||||
final boolean didPost = maybePostOnCameraThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
switchCameraOnCameraThread();
|
||||
synchronized (pendingCameraSwitchLock) {
|
||||
pendingCameraSwitch = false;
|
||||
}
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchDone(
|
||||
info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
|
||||
}
|
||||
}
|
||||
});
|
||||
if (!didPost && switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError("Camera is stopped.");
|
||||
}
|
||||
}
|
||||
|
||||
// Requests a new output format from the video capturer. Frames captured by the camera will be
// scaled and/or dropped by the video capturer. It does not matter if width and height are
// flipped, i.e. |width| = 640, |height| = 480 produces the same result as |width| = 480,
// |height| = 640.
|
||||
// TODO(magjed/perkj): Document what this function does. Change name?
|
||||
@Override
|
||||
public void onOutputFormatRequest(final int width, final int height, final int framerate) {
|
||||
maybePostOnCameraThread(new Runnable() {
|
||||
@Override public void run() {
|
||||
onOutputFormatRequestOnCameraThread(width, height, framerate);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Reconfigure the camera to capture in a new format. This should only be called while the camera
|
||||
// is running.
|
||||
@Override
|
||||
public void changeCaptureFormat(final int width, final int height, final int framerate) {
|
||||
maybePostOnCameraThread(new Runnable() {
|
||||
@Override public void run() {
|
||||
startPreviewOnCameraThread(width, height, framerate);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Helper function to retrieve the current camera id synchronously. Note that the camera id might
|
||||
// change at any point by switchCamera() calls.
|
||||
private int getCurrentCameraId() {
|
||||
synchronized (cameraIdLock) {
|
||||
return id;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<CaptureFormat> getSupportedFormats() {
|
||||
return Camera1Enumerator.getSupportedFormats(getCurrentCameraId());
|
||||
}
|
||||
|
||||
// Returns true if this VideoCapturer is set up to capture video frames to a SurfaceTexture.
|
||||
public boolean isCapturingToTexture() {
|
||||
return isCapturingToTexture;
|
||||
}
|
||||
|
||||
public VideoCapturerAndroid(String cameraName, CameraEventsHandler eventsHandler,
|
||||
boolean captureToTexture) {
|
||||
if (android.hardware.Camera.getNumberOfCameras() == 0) {
|
||||
throw new RuntimeException("No cameras available");
|
||||
}
|
||||
if (cameraName == null || cameraName.equals("")) {
|
||||
this.id = 0;
|
||||
} else {
|
||||
this.id = Camera1Enumerator.getCameraIndex(cameraName);
|
||||
}
|
||||
this.eventsHandler = eventsHandler;
|
||||
isCapturingToTexture = captureToTexture;
|
||||
Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
|
||||
}
|
||||
|
||||
private void checkIsOnCameraThread() {
|
||||
synchronized (handlerLock) {
|
||||
if (cameraThreadHandler == null) {
|
||||
Logging.e(TAG, "Camera is stopped - can't check thread.");
|
||||
} else if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
|
||||
throw new IllegalStateException("Wrong thread");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean maybePostOnCameraThread(Runnable runnable) {
|
||||
return maybePostDelayedOnCameraThread(0 /* delayMs */, runnable);
|
||||
}
|
||||
|
||||
private boolean maybePostDelayedOnCameraThread(int delayMs, Runnable runnable) {
|
||||
synchronized (handlerLock) {
|
||||
return cameraThreadHandler != null
|
||||
&& cameraThreadHandler.postAtTime(
|
||||
runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose() {
|
||||
Logging.d(TAG, "dispose");
|
||||
}
|
||||
|
||||
// Note that this actually opens the camera, and Camera callbacks run on the
|
||||
// thread that calls open(), so this is done on the CameraThread.
|
||||
@Override
|
||||
public void startCapture(
|
||||
final int width, final int height, final int framerate,
|
||||
final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
|
||||
final CapturerObserver frameObserver) {
|
||||
Logging.d(TAG, "startCapture requested: " + width + "x" + height + "@" + framerate);
|
||||
if (surfaceTextureHelper == null) {
|
||||
frameObserver.onCapturerStarted(false /* success */);
|
||||
if (eventsHandler != null) {
|
||||
eventsHandler.onCameraError("No SurfaceTexture created.");
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (applicationContext == null) {
|
||||
throw new IllegalArgumentException("applicationContext not set.");
|
||||
}
|
||||
if (frameObserver == null) {
|
||||
throw new IllegalArgumentException("frameObserver not set.");
|
||||
}
|
||||
synchronized (handlerLock) {
|
||||
if (this.cameraThreadHandler != null) {
|
||||
throw new RuntimeException("Camera has already been started.");
|
||||
}
|
||||
this.cameraThreadHandler = surfaceTextureHelper.getHandler();
|
||||
this.surfaceHelper = surfaceTextureHelper;
|
||||
final boolean didPost = maybePostOnCameraThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
openCameraAttempts = 0;
|
||||
startCaptureOnCameraThread(width, height, framerate, frameObserver,
|
||||
applicationContext);
|
||||
}
|
||||
});
|
||||
if (!didPost) {
|
||||
frameObserver.onCapturerStarted(false);
|
||||
if (eventsHandler != null) {
|
||||
eventsHandler.onCameraError("Could not post task to camera thread.");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void startCaptureOnCameraThread(
|
||||
final int width, final int height, final int framerate, final CapturerObserver frameObserver,
|
||||
final Context applicationContext) {
|
||||
synchronized (handlerLock) {
|
||||
if (cameraThreadHandler == null) {
|
||||
Logging.e(TAG, "startCaptureOnCameraThread: Camera is stopped");
|
||||
return;
|
||||
} else {
|
||||
checkIsOnCameraThread();
|
||||
}
|
||||
}
|
||||
if (camera != null) {
|
||||
Logging.e(TAG, "startCaptureOnCameraThread: Camera has already been started.");
|
||||
return;
|
||||
}
|
||||
this.applicationContext = applicationContext;
|
||||
this.frameObserver = frameObserver;
|
||||
this.firstFrameReported = false;
|
||||
|
||||
try {
|
||||
try {
|
||||
synchronized (cameraIdLock) {
|
||||
Logging.d(TAG, "Opening camera " + id);
|
||||
if (eventsHandler != null) {
|
||||
eventsHandler.onCameraOpening(id);
|
||||
}
|
||||
camera = android.hardware.Camera.open(id);
|
||||
info = new android.hardware.Camera.CameraInfo();
|
||||
android.hardware.Camera.getCameraInfo(id, info);
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
openCameraAttempts++;
|
||||
if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
|
||||
Logging.e(TAG, "Camera.open failed, retrying", e);
|
||||
maybePostDelayedOnCameraThread(OPEN_CAMERA_DELAY_MS, new Runnable() {
|
||||
@Override public void run() {
|
||||
startCaptureOnCameraThread(width, height, framerate, frameObserver,
|
||||
applicationContext);
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
|
||||
camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
|
||||
|
||||
Logging.d(TAG, "Camera orientation: " + info.orientation +
|
||||
" .Device orientation: " + getDeviceOrientation());
|
||||
camera.setErrorCallback(cameraErrorCallback);
|
||||
startPreviewOnCameraThread(width, height, framerate);
|
||||
frameObserver.onCapturerStarted(true);
|
||||
if (isCapturingToTexture) {
|
||||
surfaceHelper.startListening(this);
|
||||
}
|
||||
|
||||
// Start camera observer.
|
||||
cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
|
||||
} catch (IOException|RuntimeException e) {
|
||||
Logging.e(TAG, "startCapture failed", e);
|
||||
// Make sure the camera is released.
|
||||
stopCaptureOnCameraThread(true /* stopHandler */);
|
||||
frameObserver.onCapturerStarted(false);
|
||||
if (eventsHandler != null) {
|
||||
eventsHandler.onCameraError("Camera can not be started.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
|
||||
private void startPreviewOnCameraThread(int width, int height, int framerate) {
|
||||
synchronized (handlerLock) {
|
||||
if (cameraThreadHandler == null || camera == null) {
|
||||
Logging.e(TAG, "startPreviewOnCameraThread: Camera is stopped");
|
||||
return;
|
||||
} else {
|
||||
checkIsOnCameraThread();
|
||||
}
|
||||
}
|
||||
Logging.d(
|
||||
TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
|
||||
|
||||
requestedWidth = width;
|
||||
requestedHeight = height;
|
||||
requestedFramerate = framerate;
|
||||
|
||||
// Find closest supported format for |width| x |height| @ |framerate|.
|
||||
final android.hardware.Camera.Parameters parameters = camera.getParameters();
|
||||
final List<CaptureFormat.FramerateRange> supportedFramerates =
|
||||
Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
|
||||
Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
|
||||
|
||||
final CaptureFormat.FramerateRange fpsRange =
|
||||
CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
|
||||
|
||||
final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
|
||||
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
|
||||
|
||||
final CaptureFormat captureFormat =
|
||||
new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
|
||||
|
||||
// If we are already using this capture format, then we don't need to do anything.
|
||||
if (captureFormat.equals(this.captureFormat)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Update camera parameters.
|
||||
Logging.d(TAG, "isVideoStabilizationSupported: " +
|
||||
parameters.isVideoStabilizationSupported());
|
||||
if (parameters.isVideoStabilizationSupported()) {
|
||||
parameters.setVideoStabilization(true);
|
||||
}
|
||||
// Note: setRecordingHint(true) actually decreases the frame rate on N5.
|
||||
// parameters.setRecordingHint(true);
|
||||
if (captureFormat.framerate.max > 0) {
|
||||
parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
|
||||
}
|
||||
parameters.setPreviewSize(previewSize.width, previewSize.height);
|
||||
|
||||
if (!isCapturingToTexture) {
|
||||
parameters.setPreviewFormat(captureFormat.imageFormat);
|
||||
}
|
||||
// Picture size is for taking pictures and not for preview/video, but we need to set it anyway
|
||||
// as a workaround for an aspect ratio problem on Nexus 7.
|
||||
final Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
|
||||
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
|
||||
parameters.setPictureSize(pictureSize.width, pictureSize.height);
|
||||
|
||||
// Temporarily stop preview if it's already running.
|
||||
if (this.captureFormat != null) {
|
||||
camera.stopPreview();
|
||||
// Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
|
||||
// queue, but sometimes we receive a frame with the old resolution after this call anyway.
|
||||
camera.setPreviewCallbackWithBuffer(null);
|
||||
}
|
||||
|
||||
// (Re)start preview.
|
||||
Logging.d(TAG, "Start capturing: " + captureFormat);
|
||||
this.captureFormat = captureFormat;
|
||||
|
||||
List<String> focusModes = parameters.getSupportedFocusModes();
|
||||
if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
|
||||
parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
|
||||
}
|
||||
|
||||
camera.setParameters(parameters);
|
||||
// Calculate orientation manually and send it as CVO instead.
|
||||
camera.setDisplayOrientation(0 /* degrees */);
|
||||
if (!isCapturingToTexture) {
|
||||
queuedBuffers.clear();
|
||||
final int frameSize = captureFormat.frameSize();
|
||||
for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
|
||||
final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
|
||||
queuedBuffers.add(buffer.array());
|
||||
camera.addCallbackBuffer(buffer.array());
|
||||
}
|
||||
camera.setPreviewCallbackWithBuffer(this);
|
||||
}
|
||||
camera.startPreview();
|
||||
}
|
||||
|
||||
// Blocks until camera is known to be stopped.
|
||||
@Override
|
||||
public void stopCapture() throws InterruptedException {
|
||||
Logging.d(TAG, "stopCapture");
|
||||
final CountDownLatch barrier = new CountDownLatch(1);
|
||||
final boolean didPost = maybePostOnCameraThread(new Runnable() {
|
||||
@Override public void run() {
|
||||
stopCaptureOnCameraThread(true /* stopHandler */);
|
||||
barrier.countDown();
|
||||
}
|
||||
});
|
||||
if (!didPost) {
|
||||
Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
|
||||
return;
|
||||
}
|
||||
if (!barrier.await(CAMERA_STOP_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
|
||||
Logging.e(TAG, "Camera stop timeout");
|
||||
printStackTrace();
|
||||
if (eventsHandler != null) {
|
||||
eventsHandler.onCameraError("Camera stop timeout");
|
||||
}
|
||||
}
|
||||
Logging.d(TAG, "stopCapture done");
|
||||
}
|
||||
|
||||
private void stopCaptureOnCameraThread(boolean stopHandler) {
|
||||
synchronized (handlerLock) {
|
||||
if (cameraThreadHandler == null) {
|
||||
Logging.e(TAG, "stopCaptureOnCameraThread: Camera is stopped");
|
||||
} else {
|
||||
checkIsOnCameraThread();
|
||||
}
|
||||
}
|
||||
Logging.d(TAG, "stopCaptureOnCameraThread");
|
||||
// Note that the camera might still not be started here if startCaptureOnCameraThread failed
|
||||
// and we posted a retry.
|
||||
|
||||
// Make sure onTextureFrameAvailable() is not called anymore.
|
||||
if (surfaceHelper != null) {
|
||||
surfaceHelper.stopListening();
|
||||
}
|
||||
if (stopHandler) {
|
||||
synchronized (handlerLock) {
|
||||
// Clear the cameraThreadHandler first, in case stopPreview or
|
||||
// other driver code deadlocks. Deadlock in
|
||||
// android.hardware.Camera._stopPreview(Native Method) has
|
||||
// been observed on Nexus 5 (hammerhead), OS version LMY48I.
|
||||
// The camera might post another one or two preview frames
|
||||
// before stopped, so we have to check for a null
|
||||
// cameraThreadHandler in our handler. Remove all pending
|
||||
// Runnables posted from |this|.
|
||||
if (cameraThreadHandler != null) {
|
||||
cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
|
||||
cameraThreadHandler = null;
|
||||
}
|
||||
surfaceHelper = null;
|
||||
}
|
||||
}
|
||||
if (cameraStatistics != null) {
|
||||
cameraStatistics.release();
|
||||
cameraStatistics = null;
|
||||
}
|
||||
Logging.d(TAG, "Stop preview.");
|
||||
if (camera != null) {
|
||||
camera.stopPreview();
|
||||
camera.setPreviewCallbackWithBuffer(null);
|
||||
}
|
||||
queuedBuffers.clear();
|
||||
captureFormat = null;
|
||||
|
||||
Logging.d(TAG, "Release camera.");
|
||||
if (camera != null) {
|
||||
camera.release();
|
||||
camera = null;
|
||||
}
|
||||
if (eventsHandler != null) {
|
||||
eventsHandler.onCameraClosed();
|
||||
}
|
||||
Logging.d(TAG, "stopCaptureOnCameraThread done");
|
||||
}
|
||||
|
||||
private void switchCameraOnCameraThread() {
|
||||
synchronized (handlerLock) {
|
||||
if (cameraThreadHandler == null) {
|
||||
Logging.e(TAG, "switchCameraOnCameraThread: Camera is stopped");
|
||||
return;
|
||||
} else {
|
||||
checkIsOnCameraThread();
|
||||
}
|
||||
}
|
||||
Logging.d(TAG, "switchCameraOnCameraThread");
|
||||
stopCaptureOnCameraThread(false /* stopHandler */);
|
||||
synchronized (cameraIdLock) {
|
||||
id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
|
||||
}
|
||||
startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
|
||||
applicationContext);
|
||||
Logging.d(TAG, "switchCameraOnCameraThread done");
|
||||
}
|
||||
|
||||
private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
|
||||
synchronized (handlerLock) {
|
||||
if (cameraThreadHandler == null || camera == null) {
|
||||
Logging.e(TAG, "onOutputFormatRequestOnCameraThread: Camera is stopped");
|
||||
return;
|
||||
} else {
|
||||
checkIsOnCameraThread();
|
||||
}
|
||||
}
|
||||
Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
|
||||
"@" + framerate);
|
||||
frameObserver.onOutputFormatRequest(width, height, framerate);
|
||||
}
|
||||
|
||||
private int getDeviceOrientation() {
|
||||
int orientation = 0;
|
||||
|
||||
WindowManager wm = (WindowManager) applicationContext.getSystemService(
|
||||
Context.WINDOW_SERVICE);
|
||||
switch(wm.getDefaultDisplay().getRotation()) {
|
||||
case Surface.ROTATION_90:
|
||||
orientation = 90;
|
||||
break;
|
||||
case Surface.ROTATION_180:
|
||||
orientation = 180;
|
||||
break;
|
||||
case Surface.ROTATION_270:
|
||||
orientation = 270;
|
||||
break;
|
||||
case Surface.ROTATION_0:
|
||||
default:
|
||||
orientation = 0;
|
||||
break;
|
||||
}
|
||||
return orientation;
|
||||
}
|
||||
|
||||
private int getFrameOrientation() {
|
||||
int rotation = getDeviceOrientation();
|
||||
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
|
||||
rotation = 360 - rotation;
|
||||
}
|
||||
return (info.orientation + rotation) % 360;
|
||||
}
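// Illustrative example of the arithmetic above: a back-facing camera mounted with
// info.orientation = 90 on a device rotated 270 degrees gives rotation = 360 - 270 = 90, so the
// frame orientation is (90 + 90) % 360 = 180. For a front-facing camera the device rotation is
// used as-is, e.g. info.orientation = 270 with a 90 degree device rotation gives
// (270 + 90) % 360 = 0.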
|
||||
|
||||
// Called on the camera thread, so it must not be declared "synchronized".
|
||||
@Override
|
||||
public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
|
||||
synchronized (handlerLock) {
|
||||
if (cameraThreadHandler == null) {
|
||||
Logging.e(TAG, "onPreviewFrame: Camera is stopped");
|
||||
return;
|
||||
} else {
|
||||
checkIsOnCameraThread();
|
||||
}
|
||||
}
|
||||
if (!queuedBuffers.contains(data)) {
|
||||
// |data| is an old invalid buffer.
|
||||
return;
|
||||
}
|
||||
if (camera != callbackCamera) {
|
||||
throw new RuntimeException("Unexpected camera in callback!");
|
||||
}
|
||||
|
||||
final long captureTimeNs =
|
||||
TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
|
||||
|
||||
if (eventsHandler != null && !firstFrameReported) {
|
||||
eventsHandler.onFirstFrameAvailable();
|
||||
firstFrameReported = true;
|
||||
}
|
||||
|
||||
cameraStatistics.addFrame();
|
||||
frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
|
||||
getFrameOrientation(), captureTimeNs);
|
||||
camera.addCallbackBuffer(data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onTextureFrameAvailable(
|
||||
int oesTextureId, float[] transformMatrix, long timestampNs) {
|
||||
synchronized (handlerLock) {
|
||||
if (cameraThreadHandler == null) {
|
||||
Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");
|
||||
surfaceHelper.returnTextureFrame();
|
||||
return;
|
||||
} else {
|
||||
checkIsOnCameraThread();
|
||||
}
|
||||
}
|
||||
if (eventsHandler != null && !firstFrameReported) {
|
||||
eventsHandler.onFirstFrameAvailable();
|
||||
firstFrameReported = true;
|
||||
}
|
||||
|
||||
int rotation = getFrameOrientation();
|
||||
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
|
||||
// Undo the mirror that the OS "helps" us with.
|
||||
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
|
||||
transformMatrix =
|
||||
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
|
||||
}
|
||||
cameraStatistics.addFrame();
|
||||
frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
|
||||
transformMatrix, rotation, timestampNs);
|
||||
}
|
||||
}
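A minimal start/stop sketch for the capturer above. It is illustrative only: the
SurfaceTextureHelper, application Context, and CapturerObserver (normally supplied by the native
video source) are assumed to exist and are not created here.

  // Hypothetical wiring: open the default camera ("" selects camera id 0) and start capture.
  VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null /* eventsHandler */);
  if (capturer == null) {
    throw new RuntimeException("Failed to open camera");
  }
  capturer.startCapture(1280, 720, 30, surfaceTextureHelper, applicationContext, capturerObserver);
  // ...
  capturer.switchCamera(null /* switchEventsHandler */);
  // ...
  try {
    capturer.stopCapture();  // Blocks until the camera thread reports that the camera is stopped.
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
  }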
@ -1,650 +0,0 @@
/*
 * Copyright 2014 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import java.util.ArrayList;
import java.util.concurrent.CountDownLatch;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.opengles.GL10;

import android.annotation.SuppressLint;
import android.graphics.Point;
import android.graphics.Rect;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

import org.webrtc.Logging;
import org.webrtc.VideoRenderer.I420Frame;

/**
 * Efficiently renders YUV frames using the GPU for CSC.
 * Clients will first want to call setView() to pass a GLSurfaceView, and then, for each video
 * stream, either create an instance of VideoRenderer using createGui() or a
 * VideoRenderer.Callbacks interface using create().
 * Only one instance of the class can be created.
 */
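// A minimal usage sketch of the flow described above (the glSurfaceView variable and the chosen
// ScalingType value are illustrative placeholders, not part of this file):
//
//   VideoRendererGui.setView(glSurfaceView, null /* eglContextReadyCallback */);
//   VideoRenderer renderer = VideoRendererGui.createGui(
//       0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FILL, false /* mirror */);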
public class VideoRendererGui implements GLSurfaceView.Renderer {
  // |instance|, |instance.surface|, |eglContext|, and |eglContextReady| are synchronized on
  // |VideoRendererGui.class|.
  private static VideoRendererGui instance = null;
  private static Runnable eglContextReady = null;
  private static final String TAG = "VideoRendererGui";
  private GLSurfaceView surface;
  private static EglBase.Context eglContext = null;
  // Indicates if SurfaceView.Renderer.onSurfaceCreated was called.
  // If true then for every newly created yuv image renderer createTexture()
  // should be called. The variable is accessed on multiple threads and
  // all accesses are synchronized on yuvImageRenderers' object lock.
  private boolean onSurfaceCreatedCalled;
  private int screenWidth;
  private int screenHeight;
  // List of yuv renderers.
  private final ArrayList<YuvImageRenderer> yuvImageRenderers;
  // Render and draw threads.
  private static Thread renderFrameThread;
  private static Thread drawThread;

  private VideoRendererGui(GLSurfaceView surface) {
    this.surface = surface;
    // Create an OpenGL ES 2.0 context.
    surface.setPreserveEGLContextOnPause(true);
    surface.setEGLContextClientVersion(2);
    surface.setRenderer(this);
    surface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

    yuvImageRenderers = new ArrayList<YuvImageRenderer>();
  }

  /**
   * Class used to display a stream of YUV420 frames at a particular location
   * on the screen. New video frames are sent to the display using the
   * renderFrame() call.
   */
  private static class YuvImageRenderer implements VideoRenderer.Callbacks {
    // |surface| is synchronized on |this|.
    private GLSurfaceView surface;
    private int id;
    // TODO(magjed): Delete GL resources in release(). Must be synchronized with draw(). We are
    // currently leaking resources to avoid a rare crash in release() where the EGLContext has
    // become invalid beforehand.
    private int[] yuvTextures = { 0, 0, 0 };
    private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
    private final RendererCommon.GlDrawer drawer;
    // Resources for making a deep copy of incoming OES texture frame.
    private GlTextureFrameBuffer textureCopy;

    // Pending frame to render. Serves as a queue with size 1. |pendingFrame| is accessed by two
    // threads - frames are received in renderFrame() and consumed in draw(). Frames are dropped in
    // renderFrame() if the previous frame has not been rendered yet.
    private I420Frame pendingFrame;
    private final Object pendingFrameLock = new Object();
    // Type of video frame used for recent frame rendering.
    private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
    private RendererType rendererType;
    private RendererCommon.ScalingType scalingType;
    private boolean mirror;
    private RendererCommon.RendererEvents rendererEvents;
    // Flag if renderFrame() was ever called.
    boolean seenFrame;
    // Total number of video frames received in renderFrame() call.
    private int framesReceived;
    // Number of video frames dropped by renderFrame() because previous
    // frame has not been rendered yet.
    private int framesDropped;
    // Number of rendered video frames.
    private int framesRendered;
    // Time in ns when the first video frame was rendered.
    private long startTimeNs = -1;
    // Time in ns spent in draw() function.
    private long drawTimeNs;
    // Time in ns spent in draw() copying resources from |pendingFrame| - including uploading frame
    // data to rendering planes.
    private long copyTimeNs;
    // The allowed view area in percentage of screen size.
    private final Rect layoutInPercentage;
    // The actual view area in pixels. It is a centered subrectangle of the rectangle defined by
    // |layoutInPercentage|.
    private final Rect displayLayout = new Rect();
    // Cached layout transformation matrix, calculated from current layout parameters.
    private float[] layoutMatrix;
    // Flag if layout transformation matrix update is needed.
    private boolean updateLayoutProperties;
    // Layout properties update lock. Guards |updateLayoutProperties|, |screenWidth|,
    // |screenHeight|, |videoWidth|, |videoHeight|, |rotationDegree|, |scalingType|, and |mirror|.
    private final Object updateLayoutLock = new Object();
    // Texture sampling matrix.
    private float[] rotatedSamplingMatrix;
    // Viewport dimensions.
    private int screenWidth;
    private int screenHeight;
    // Video dimension.
    private int videoWidth;
    private int videoHeight;

    // This is the number of degrees that the frame should be rotated clockwise
    // to have it rendered upright.
    private int rotationDegree;

    private YuvImageRenderer(
        GLSurfaceView surface, int id,
        int x, int y, int width, int height,
        RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
      Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
      this.surface = surface;
      this.id = id;
      this.scalingType = scalingType;
      this.mirror = mirror;
      this.drawer = drawer;
      layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
      updateLayoutProperties = false;
      rotationDegree = 0;
    }

    public synchronized void reset() {
      seenFrame = false;
    }

    private synchronized void release() {
      surface = null;
      drawer.release();
      synchronized (pendingFrameLock) {
        if (pendingFrame != null) {
          VideoRenderer.renderFrameDone(pendingFrame);
          pendingFrame = null;
        }
      }
    }

    private void createTextures() {
      Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
          Thread.currentThread().getId());

      // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
      for (int i = 0; i < 3; i++) {
        yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
      }
      // Generate texture and framebuffer for offscreen texture copy.
      textureCopy = new GlTextureFrameBuffer(GLES20.GL_RGB);
    }

    private void updateLayoutMatrix() {
      synchronized (updateLayoutLock) {
        if (!updateLayoutProperties) {
          return;
        }
        // Initialize to maximum allowed area. Round to integer coordinates towards the inside of
        // the layout bounding box (ceil left/top and floor right/bottom) to not break constraints.
        displayLayout.set(
            (screenWidth * layoutInPercentage.left + 99) / 100,
            (screenHeight * layoutInPercentage.top + 99) / 100,
            (screenWidth * layoutInPercentage.right) / 100,
            (screenHeight * layoutInPercentage.bottom) / 100);
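        // For example (hypothetical numbers): with screenWidth = 1080 and layoutInPercentage
        // left/right = 33/67, the left edge becomes (1080 * 33 + 99) / 100 = 357 (356.4 rounded
        // up) and the right edge becomes (1080 * 67) / 100 = 723 (723.6 rounded down).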
        Logging.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
            + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
            + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
        final float videoAspectRatio = (rotationDegree % 180 == 0)
            ? (float) videoWidth / videoHeight
            : (float) videoHeight / videoWidth;
        // Adjust display size based on |scalingType|.
        final Point displaySize = RendererCommon.getDisplaySize(scalingType,
            videoAspectRatio, displayLayout.width(), displayLayout.height());
        displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
            (displayLayout.height() - displaySize.y) / 2);
        Logging.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
            + displayLayout.height());
        layoutMatrix = RendererCommon.getLayoutMatrix(
            mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
        updateLayoutProperties = false;
        Logging.d(TAG, " AdjustTextureCoords done");
      }
    }

    private void draw() {
      if (!seenFrame) {
        // No frame received yet - nothing to render.
        return;
      }
      long now = System.nanoTime();

      final boolean isNewFrame;
      synchronized (pendingFrameLock) {
        isNewFrame = (pendingFrame != null);
        if (isNewFrame && startTimeNs == -1) {
          startTimeNs = now;
        }

        if (isNewFrame) {
          rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
              pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
          if (pendingFrame.yuvFrame) {
            rendererType = RendererType.RENDERER_YUV;
            yuvUploader.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
                pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
          } else {
            rendererType = RendererType.RENDERER_TEXTURE;
            // External texture rendering. Make a deep copy of the external texture.
            // Reallocate offscreen texture if necessary.
            textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());

            // Bind our offscreen framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
            GlUtil.checkNoGLES2Error("glBindFramebuffer");

            // Copy the OES texture content. This will also normalize the sampling matrix.
            drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
                textureCopy.getWidth(), textureCopy.getHeight(),
                0, 0, textureCopy.getWidth(), textureCopy.getHeight());
            rotatedSamplingMatrix = RendererCommon.identityMatrix();

            // Restore normal framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
            GLES20.glFinish();
          }
          copyTimeNs += (System.nanoTime() - now);
          VideoRenderer.renderFrameDone(pendingFrame);
          pendingFrame = null;
        }
      }

      updateLayoutMatrix();
      final float[] texMatrix =
          RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
      // OpenGL defaults to lower left origin - flip viewport position vertically.
      final int viewportY = screenHeight - displayLayout.bottom;
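      // For example (hypothetical numbers): with screenHeight = 1920 and displayLayout.bottom =
      // 1800, viewportY = 120, placing the GL viewport 120 px up from the bottom edge, which
      // matches a layout whose bottom sits 1800 px down from the top in screen coordinates.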
      if (rendererType == RendererType.RENDERER_YUV) {
        drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight,
            displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
      } else {
        drawer.drawRgb(textureCopy.getTextureId(), texMatrix, videoWidth, videoHeight,
            displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
      }

      if (isNewFrame) {
        framesRendered++;
        drawTimeNs += (System.nanoTime() - now);
        if ((framesRendered % 300) == 0) {
          logStatistics();
        }
      }
    }

    private void logStatistics() {
      long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
      Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
          ". Frames received: " + framesReceived +
          ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
      if (framesReceived > 0 && framesRendered > 0) {
        Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
            " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
        Logging.d(TAG, "Draw time: " +
            (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
            (int) (copyTimeNs / (1000 * framesReceived)) + " us");
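        // For example (hypothetical numbers): 300 frames rendered over 10 s (1e10 ns) logs
        // FPS = 300 * 1e9 / 1e10 = 30.0, and drawTimeNs / (1000 * framesRendered) converts the
        // accumulated draw time from total nanoseconds to average microseconds per frame.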
      }
    }

    public void setScreenSize(final int screenWidth, final int screenHeight) {
      synchronized (updateLayoutLock) {
        if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) {
          return;
        }
        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " +
            screenWidth + " x " + screenHeight);
        this.screenWidth = screenWidth;
        this.screenHeight = screenHeight;
        updateLayoutProperties = true;
      }
    }

    public void setPosition(int x, int y, int width, int height,
        RendererCommon.ScalingType scalingType, boolean mirror) {
      final Rect layoutInPercentage =
          new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
      synchronized (updateLayoutLock) {
        if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
            && mirror == this.mirror) {
          return;
        }
        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
            ") " + width + " x " + height + ". Scaling: " + scalingType +
            ". Mirror: " + mirror);
        this.layoutInPercentage.set(layoutInPercentage);
        this.scalingType = scalingType;
        this.mirror = mirror;
        updateLayoutProperties = true;
      }
    }

    private void setSize(final int videoWidth, final int videoHeight, final int rotation) {
      if (videoWidth == this.videoWidth && videoHeight == this.videoHeight
          && rotation == rotationDegree) {
        return;
      }
      if (rendererEvents != null) {
        Logging.d(TAG, "ID: " + id +
            ". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight);
        rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
      }

      synchronized (updateLayoutLock) {
        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
            videoWidth + " x " + videoHeight + " rotation " + rotation);

        this.videoWidth = videoWidth;
        this.videoHeight = videoHeight;
        rotationDegree = rotation;
        updateLayoutProperties = true;
        Logging.d(TAG, " YuvImageRenderer.setSize done.");
      }
    }

    @Override
    public synchronized void renderFrame(I420Frame frame) {
      if (surface == null) {
        // This object has been released.
        VideoRenderer.renderFrameDone(frame);
        return;
      }
      if (renderFrameThread == null) {
        renderFrameThread = Thread.currentThread();
      }
      if (!seenFrame && rendererEvents != null) {
        Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
        rendererEvents.onFirstFrameRendered();
      }
      framesReceived++;
      synchronized (pendingFrameLock) {
        // Check input frame parameters.
        if (frame.yuvFrame) {
          if (frame.yuvStrides[0] < frame.width ||
              frame.yuvStrides[1] < frame.width / 2 ||
              frame.yuvStrides[2] < frame.width / 2) {
            Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
                frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
            VideoRenderer.renderFrameDone(frame);
            return;
          }
        }

        if (pendingFrame != null) {
          // Skip rendering of this frame if previous frame was not rendered yet.
          framesDropped++;
          VideoRenderer.renderFrameDone(frame);
          seenFrame = true;
          return;
        }
        pendingFrame = frame;
      }
      setSize(frame.width, frame.height, frame.rotationDegree);
      seenFrame = true;

      // Request rendering.
      surface.requestRender();
    }
  }

  /** Passes GLSurfaceView to video renderer. */
  public static synchronized void setView(GLSurfaceView surface,
      Runnable eglContextReadyCallback) {
    Logging.d(TAG, "VideoRendererGui.setView");
    instance = new VideoRendererGui(surface);
    eglContextReady = eglContextReadyCallback;
  }

  public static synchronized EglBase.Context getEglBaseContext() {
    return eglContext;
  }

  /** Releases GLSurfaceView video renderer. */
  public static synchronized void dispose() {
    if (instance == null) {
      return;
    }
    Logging.d(TAG, "VideoRendererGui.dispose");
    synchronized (instance.yuvImageRenderers) {
      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
        yuvImageRenderer.release();
      }
      instance.yuvImageRenderers.clear();
    }
    renderFrameThread = null;
    drawThread = null;
    instance.surface = null;
    eglContext = null;
    eglContextReady = null;
    instance = null;
  }

  /**
   * Creates VideoRenderer with top left corner at (x, y) and resolution
   * (width, height). All parameters are in percentage of screen resolution.
   */
  public static VideoRenderer createGui(int x, int y, int width, int height,
      RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
    YuvImageRenderer javaGuiRenderer = create(
        x, y, width, height, scalingType, mirror);
    return new VideoRenderer(javaGuiRenderer);
  }

  public static VideoRenderer.Callbacks createGuiRenderer(
      int x, int y, int width, int height,
      RendererCommon.ScalingType scalingType, boolean mirror) {
    return create(x, y, width, height, scalingType, mirror);
  }

  /**
   * Creates VideoRenderer.Callbacks with top left corner at (x, y) and
   * resolution (width, height). All parameters are in percentage of
   * screen resolution.
   */
  public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
      RendererCommon.ScalingType scalingType, boolean mirror) {
    return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
  }

  /**
   * Creates VideoRenderer.Callbacks with top left corner at (x, y) and resolution (width, height).
   * All parameters are in percentage of screen resolution. The custom |drawer| will be used for
   * drawing frames on the EGLSurface. This class is responsible for calling release() on |drawer|.
   */
  public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
      RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
    // Check display region parameters.
    if (x < 0 || x > 100 || y < 0 || y > 100 ||
        width < 0 || width > 100 || height < 0 || height > 100 ||
        x + width > 100 || y + height > 100) {
      throw new RuntimeException("Incorrect window parameters.");
    }

    if (instance == null) {
      throw new RuntimeException(
          "Attempt to create yuv renderer before setting GLSurfaceView");
    }
    final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
        instance.surface, instance.yuvImageRenderers.size(),
        x, y, width, height, scalingType, mirror, drawer);
    synchronized (instance.yuvImageRenderers) {
      if (instance.onSurfaceCreatedCalled) {
        // onSurfaceCreated has already been called for VideoRendererGui -
        // need to create texture for new image and add image to the
        // rendering list.
        final CountDownLatch countDownLatch = new CountDownLatch(1);
        instance.surface.queueEvent(new Runnable() {
          @Override
          public void run() {
            yuvImageRenderer.createTextures();
            yuvImageRenderer.setScreenSize(
                instance.screenWidth, instance.screenHeight);
            countDownLatch.countDown();
          }
        });
        // Wait for task completion.
        try {
          countDownLatch.await();
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
      }
      // Add yuv renderer to rendering list.
      instance.yuvImageRenderers.add(yuvImageRenderer);
    }
    return yuvImageRenderer;
  }

  public static synchronized void update(
      VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
      RendererCommon.ScalingType scalingType, boolean mirror) {
    Logging.d(TAG, "VideoRendererGui.update");
    if (instance == null) {
      throw new RuntimeException(
          "Attempt to update yuv renderer before setting GLSurfaceView");
    }
    synchronized (instance.yuvImageRenderers) {
      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
        if (yuvImageRenderer == renderer) {
          yuvImageRenderer.setPosition(x, y, width, height, scalingType, mirror);
        }
      }
    }
  }

  public static synchronized void setRendererEvents(
      VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
    Logging.d(TAG, "VideoRendererGui.setRendererEvents");
    if (instance == null) {
      throw new RuntimeException(
          "Attempt to set renderer events before setting GLSurfaceView");
    }
    synchronized (instance.yuvImageRenderers) {
      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
        if (yuvImageRenderer == renderer) {
          yuvImageRenderer.rendererEvents = rendererEvents;
        }
      }
    }
  }

  public static synchronized void remove(VideoRenderer.Callbacks renderer) {
    Logging.d(TAG, "VideoRendererGui.remove");
    if (instance == null) {
      throw new RuntimeException(
          "Attempt to remove renderer before setting GLSurfaceView");
    }
    synchronized (instance.yuvImageRenderers) {
      final int index = instance.yuvImageRenderers.indexOf(renderer);
      if (index == -1) {
        Logging.w(TAG, "Couldn't remove renderer (not present in current list)");
      } else {
        instance.yuvImageRenderers.remove(index).release();
      }
    }
  }

  public static synchronized void reset(VideoRenderer.Callbacks renderer) {
    Logging.d(TAG, "VideoRendererGui.reset");
    if (instance == null) {
      throw new RuntimeException(
          "Attempt to reset renderer before setting GLSurfaceView");
    }
    synchronized (instance.yuvImageRenderers) {
      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
        if (yuvImageRenderer == renderer) {
          yuvImageRenderer.reset();
        }
      }
    }
  }

  private static void printStackTrace(Thread thread, String threadName) {
    if (thread != null) {
      StackTraceElement[] stackTraces = thread.getStackTrace();
      if (stackTraces.length > 0) {
        Logging.d(TAG, threadName + " stack trace:");
        for (StackTraceElement stackTrace : stackTraces) {
          Logging.d(TAG, stackTrace.toString());
        }
      }
    }
  }

  public static synchronized void printStackTraces() {
    if (instance == null) {
      return;
    }
    printStackTrace(renderFrameThread, "Render frame thread");
    printStackTrace(drawThread, "Draw thread");
  }

  @SuppressLint("NewApi")
  @Override
  public void onSurfaceCreated(GL10 unused, EGLConfig config) {
    Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
    // Store render EGL context.
    synchronized (VideoRendererGui.class) {
      if (EglBase14.isEGL14Supported()) {
        eglContext = new EglBase14.Context(EGL14.eglGetCurrentContext());
      } else {
        eglContext = new EglBase10.Context(((EGL10) EGLContext.getEGL()).eglGetCurrentContext());
      }

      Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
    }

    synchronized (yuvImageRenderers) {
      // Create textures for all images.
      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
        yuvImageRenderer.createTextures();
      }
      onSurfaceCreatedCalled = true;
    }
    GlUtil.checkNoGLES2Error("onSurfaceCreated done");
    GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
    GLES20.glClearColor(0.15f, 0.15f, 0.15f, 1.0f);

    // Fire EGL context ready event.
    synchronized (VideoRendererGui.class) {
      if (eglContextReady != null) {
        eglContextReady.run();
      }
    }
  }

  @Override
  public void onSurfaceChanged(GL10 unused, int width, int height) {
    Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
        width + " x " + height + " ");
    screenWidth = width;
    screenHeight = height;
    synchronized (yuvImageRenderers) {
      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
        yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
      }
    }
  }

  @Override
  public void onDrawFrame(GL10 unused) {
    if (drawThread == null) {
      drawThread = Thread.currentThread();
    }
    GLES20.glViewport(0, 0, screenWidth, screenHeight);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    synchronized (yuvImageRenderers) {
      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
        yuvImageRenderer.draw();
      }
    }
  }

}