Revert of Combine webrtc/api/java/android and webrtc/api/java/src. (patchset #1 id:1 of https://codereview.webrtc.org/2111823002/ )

Reason for revert:
Breaks downstream dependencies

Original issue's description:
> Combine webrtc/api/java/android and webrtc/api/java/src.
>
> It used to be that there was a Java api for devices not running Android
> but that is no longer the case. I combined the directories and made
> the folder structure chromium style.
>
> BUG=webrtc:6067
> R=magjed@webrtc.org, tommi@webrtc.org
>
> Committed: ceefe20dd6

TBR=magjed@webrtc.org,tommi@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:6067

Review URL: https://codereview.webrtc.org/2106333005 .

Cr-Commit-Position: refs/heads/master@{#13357}
Sami Kalliomaki committed 2016-07-01 09:37:42 +02:00
parent ceefe20dd6
commit 9b0dc622d4
73 changed files with 136 additions and 134 deletions

View File

@@ -1,21 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
* more {@code AudioTrack} objects.
*/
public class AudioSource extends MediaSource {
public AudioSource(long nativeSource) {
super(nativeSource);
}
}

View File

@@ -1,18 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ AudioTrackInterface */
public class AudioTrack extends MediaStreamTrack {
public AudioTrack(long nativeTrack) {
super(nativeTrack);
}
}
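
The two wrapper classes above are normally obtained from a PeerConnectionFactory rather than constructed from a raw native pointer. A minimal usage sketch, assuming the factory methods createAudioSource() and createAudioTrack() from the same API revision (factory initialization is omitted):

import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.MediaConstraints;
import org.webrtc.PeerConnectionFactory;

class AudioTrackExample {
  // Assumes the factory has already been initialized for Android.
  static AudioTrack createLocalAudioTrack(PeerConnectionFactory factory) {
    // The factory owns the native AudioSourceInterface; the Java objects only wrap it.
    AudioSource source = factory.createAudioSource(new MediaConstraints());
    AudioTrack track = factory.createAudioTrack("audio0", source);
    track.setEnabled(true); // setEnabled() is inherited from MediaStreamTrack.
    return track;
  }
}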

View File

@@ -1,40 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class CallSessionFileRotatingLogSink {
static {
System.loadLibrary("jingle_peerconnection_so");
}
private long nativeSink;
public static byte[] getLogData(String dirPath) {
return nativeGetLogData(dirPath);
}
public CallSessionFileRotatingLogSink(
String dirPath, int maxFileSize, Logging.Severity severity) {
nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
}
public void dispose() {
if (nativeSink != 0) {
nativeDeleteSink(nativeSink);
nativeSink = 0;
}
}
private static native long nativeAddSink(
String dirPath, int maxFileSize, int severity);
private static native void nativeDeleteSink(long nativeSink);
private static native byte[] nativeGetLogData(String dirPath);
}
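
A minimal lifecycle sketch for the sink above: create it for the duration of a call and dispose it afterwards so the native sink is released. Logging.Severity.LS_INFO is assumed to be a valid severity constant in this revision; the directory path and size limit are placeholders:

import org.webrtc.CallSessionFileRotatingLogSink;
import org.webrtc.Logging;

class LogSinkExample {
  static byte[] captureCallLog(String logDir) {
    // Rotating log files in |logDir|, capped at roughly 10 MB in total.
    CallSessionFileRotatingLogSink sink = new CallSessionFileRotatingLogSink(
        logDir, 10 * 1024 * 1024, Logging.Severity.LS_INFO);
    try {
      // ... run the call while the sink collects native logs ...
    } finally {
      sink.dispose(); // Releases the native sink; dispose() is safe to call once.
    }
    return CallSessionFileRotatingLogSink.getLogData(logDir);
  }
}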

View File

@@ -1,172 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import android.os.SystemClock;
import java.util.ArrayList;
import java.util.List;
@SuppressWarnings("deprecation")
public class Camera1Enumerator implements CameraEnumerator {
private final static String TAG = "Camera1Enumerator";
// Each entry contains the supported formats for corresponding camera index. The formats for all
// cameras are enumerated on the first call to getSupportedFormats(), and cached for future
// reference.
private static List<List<CaptureFormat>> cachedSupportedFormats;
private final boolean captureToTexture;
public Camera1Enumerator() {
this(true /* captureToTexture */);
}
public Camera1Enumerator(boolean captureToTexture) {
this.captureToTexture = captureToTexture;
}
// Returns device names that can be used to create a new VideoCapturerAndroid.
@Override
public String[] getDeviceNames() {
String[] names = new String[android.hardware.Camera.getNumberOfCameras()];
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
names[i] = getDeviceName(i);
}
return names;
}
@Override
public boolean isFrontFacing(String deviceName) {
android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
return info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
}
@Override
public boolean isBackFacing(String deviceName) {
android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
return info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
}
@Override
public CameraVideoCapturer createCapturer(String deviceName,
CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new VideoCapturerAndroid(deviceName, eventsHandler, captureToTexture);
}
private static android.hardware.Camera.CameraInfo getCameraInfo(int index) {
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
try {
android.hardware.Camera.getCameraInfo(index, info);
} catch (Exception e) {
Logging.e(TAG, "getCameraInfo failed on index " + index,e);
return null;
}
return info;
}
static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
if (cachedSupportedFormats == null) {
cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
for (int i = 0; i < CameraEnumerationAndroid.getDeviceCount(); ++i) {
cachedSupportedFormats.add(enumerateFormats(i));
}
}
return cachedSupportedFormats.get(cameraId);
}
private static List<CaptureFormat> enumerateFormats(int cameraId) {
Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
final long startTimeMs = SystemClock.elapsedRealtime();
final android.hardware.Camera.Parameters parameters;
android.hardware.Camera camera = null;
try {
Logging.d(TAG, "Opening camera with index " + cameraId);
camera = android.hardware.Camera.open(cameraId);
parameters = camera.getParameters();
} catch (RuntimeException e) {
Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
return new ArrayList<CaptureFormat>();
} finally {
if (camera != null) {
camera.release();
}
}
final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
try {
int minFps = 0;
int maxFps = 0;
final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
if (listFpsRange != null) {
// getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
// corresponding to the highest fps.
final int[] range = listFpsRange.get(listFpsRange.size() - 1);
minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
}
for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
}
} catch (Exception e) {
Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
}
final long endTimeMs = SystemClock.elapsedRealtime();
Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
return formatList;
}
// Convert from android.hardware.Camera.Size to Size.
static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
final List<Size> sizes = new ArrayList<Size>();
for (android.hardware.Camera.Size size : cameraSizes) {
sizes.add(new Size(size.width, size.height));
}
return sizes;
}
// Convert from int[2] to CaptureFormat.FramerateRange.
static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
for (int[] range : arrayRanges) {
ranges.add(new CaptureFormat.FramerateRange(
range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
}
return ranges;
}
// Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException
// if no such camera can be found.
static int getCameraIndex(String deviceName) {
Logging.d(TAG, "getCameraIndex: " + deviceName);
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
return i;
}
}
throw new IllegalArgumentException("No such camera: " + deviceName);
}
// Returns the name of the camera with camera index. Returns null if the
// camera can not be used.
static String getDeviceName(int index) {
android.hardware.Camera.CameraInfo info = getCameraInfo(index);
String facing =
(info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
return "Camera " + index + ", Facing " + facing
+ ", Orientation " + info.orientation;
}
}
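
A short sketch of how this enumerator is typically used to pick a front-facing device and create a capturer, using only the methods shown above; passing null as the events handler is assumed to be acceptable when no camera callbacks are needed:

import org.webrtc.Camera1Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.CameraVideoCapturer;

class FrontCameraExample {
  // Returns a capturer for the first front-facing camera, or null if none is found.
  static CameraVideoCapturer createFrontFacingCapturer() {
    CameraEnumerator enumerator = new Camera1Enumerator(true /* captureToTexture */);
    for (String deviceName : enumerator.getDeviceNames()) {
      if (enumerator.isFrontFacing(deviceName)) {
        return enumerator.createCapturer(deviceName, null /* eventsHandler */);
      }
    }
    return null;
  }
}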

View File

@@ -1,926 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Build;
import android.os.Handler;
import android.os.SystemClock;
import android.util.Range;
import android.view.Surface;
import android.view.WindowManager;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Semaphore;
@TargetApi(21)
public class Camera2Capturer implements
CameraVideoCapturer,
SurfaceTextureHelper.OnTextureFrameAvailableListener {
private final static String TAG = "Camera2Capturer";
private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
private final static int OPEN_CAMERA_DELAY_MS = 500;
private final static int STOP_TIMEOUT = 10000;
private final static int START_TIMEOUT = 10000;
private final static Object STOP_TIMEOUT_RUNNABLE_TOKEN = new Object();
// In the Camera2 API, starting a camera is inherently asynchronous, and this state is
// represented with 'STARTING'. Stopping is also asynchronous and this state is 'STOPPING'.
private static enum CameraState { IDLE, STARTING, RUNNING, STOPPING }
// Thread safe objects.
// --------------------
private final CameraManager cameraManager;
private final CameraEventsHandler eventsHandler;
// Shared state - guarded by cameraStateLock. Will only be edited from camera thread (when it is
// running).
// ---------------------------------------------------------------------------------------------
private final Object cameraStateLock = new Object();
private CameraState cameraState = CameraState.IDLE;
// |cameraThreadHandler| must be synchronized on |cameraStateLock| when not on the camera thread,
// or when modifying the reference. Use postOnCameraThread() instead of posting directly to
// the handler - this way all callbacks with a specified token can be removed at once.
// |cameraThreadHandler| must be null if and only if CameraState is IDLE.
private Handler cameraThreadHandler;
// Remember the requested format in case we want to switch cameras.
private int requestedWidth;
private int requestedHeight;
private int requestedFramerate;
// Will only be edited while camera state is IDLE and cameraStateLock is acquired.
private String cameraName;
private boolean isFrontCamera;
private int cameraOrientation;
// Semaphore for allowing only one switch at a time.
private final Semaphore pendingCameraSwitchSemaphore = new Semaphore(1);
// Guarded by pendingCameraSwitchSemaphore
private CameraSwitchHandler switchEventsHandler;
// Internal state - must only be modified from camera thread
// ---------------------------------------------------------
private CaptureFormat captureFormat;
private Context applicationContext;
private CapturerObserver capturerObserver;
private CameraStatistics cameraStatistics;
private SurfaceTextureHelper surfaceTextureHelper;
private CameraCaptureSession captureSession;
private Surface surface;
private CameraDevice cameraDevice;
private CameraStateCallback cameraStateCallback;
// Factor to convert between Android framerates and CaptureFormat.FramerateRange. It will be
// either 1 or 1000.
private int fpsUnitFactor;
private boolean firstFrameReported;
private int consecutiveCameraOpenFailures;
public Camera2Capturer(
Context context, String cameraName, CameraEventsHandler eventsHandler) {
Logging.d(TAG, "Camera2Capturer ctor, camera name: " + cameraName);
this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
this.eventsHandler = eventsHandler;
setCameraName(cameraName);
}
/**
* Helper method for checking method is executed on camera thread. Also allows calls from other
* threads if camera is closed.
*/
private void checkIsOnCameraThread() {
if (cameraState == CameraState.IDLE) {
return;
}
checkIsStrictlyOnCameraThread();
}
/**
* Like checkIsOnCameraThread but doesn't allow the camera to be stopped.
*/
private void checkIsStrictlyOnCameraThread() {
if (cameraThreadHandler == null) {
throw new IllegalStateException("Camera is closed.");
}
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
/**
* Checks method is not invoked on the camera thread. Used in functions waiting for the camera
* state to change since executing them on the camera thread would cause a deadlock.
*/
private void checkNotOnCameraThread() {
if (cameraThreadHandler == null) {
return;
}
if (Thread.currentThread() == cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException(
"Method waiting for camera state to change executed on camera thread");
}
}
private void waitForCameraToExitTransitionalState(
CameraState transitionalState, long timeoutMs) {
checkNotOnCameraThread();
// We probably should already have the lock when this is called but acquire it in case
// we don't have it.
synchronized (cameraStateLock) {
long timeoutAt = SystemClock.uptimeMillis() + timeoutMs;
while (cameraState == transitionalState) {
Logging.d(TAG, "waitForCameraToExitTransitionalState waiting: "
+ cameraState);
long timeLeft = timeoutAt - SystemClock.uptimeMillis();
if (timeLeft <= 0) {
Logging.e(TAG, "Camera failed to exit transitional state " + transitionalState
+ " within the time limit.");
break;
}
try {
cameraStateLock.wait(timeLeft);
} catch (InterruptedException e) {
Logging.w(TAG, "Trying to interrupt while waiting to exit transitional state "
+ transitionalState + ", ignoring: " + e);
}
}
}
}
/**
* Waits until camera state is not STOPPING.
*/
private void waitForCameraToStopIfStopping() {
waitForCameraToExitTransitionalState(CameraState.STOPPING, STOP_TIMEOUT);
}
/**
* Wait until camera state is not STARTING.
*/
private void waitForCameraToStartIfStarting() {
waitForCameraToExitTransitionalState(CameraState.STARTING, START_TIMEOUT);
}
/**
* Sets the name of the camera. Camera must be stopped or stopping when this is called.
*/
private void setCameraName(String cameraName) {
final CameraCharacteristics characteristics;
try {
final String[] cameraIds = cameraManager.getCameraIdList();
if (cameraName.isEmpty() && cameraIds.length != 0) {
cameraName = cameraIds[0];
}
if (!Arrays.asList(cameraIds).contains(cameraName)) {
throw new IllegalArgumentException(
"Camera name: " + cameraName + " does not match any known camera device:");
}
characteristics = cameraManager.getCameraCharacteristics(cameraName);
} catch (CameraAccessException e) {
throw new RuntimeException("Camera access exception: " + e);
}
synchronized (cameraStateLock) {
waitForCameraToStopIfStopping();
if (cameraState != CameraState.IDLE) {
throw new RuntimeException("Changing camera name on running camera.");
}
// Note: Usually changing camera state from outside camera thread is not allowed. It is
// allowed here because camera is not running.
this.cameraName = cameraName;
isFrontCamera = characteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_FRONT;
/*
* Clockwise angle through which the output image needs to be rotated to be upright on the
* device screen in its native orientation.
* Also defines the direction of rolling shutter readout, which is from top to bottom in the
* sensor's coordinate system.
* Units: Degrees of clockwise rotation; always a multiple of 90
*/
cameraOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
}
}
/**
* Triggers appropriate error handlers based on the camera state. Must be called on the camera
* thread and camera must not be stopped.
*/
private void reportError(String errorDescription) {
checkIsStrictlyOnCameraThread();
Logging.e(TAG, "Error in camera at state " + cameraState + ": " + errorDescription);
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError(errorDescription);
switchEventsHandler = null;
pendingCameraSwitchSemaphore.release();
}
switch (cameraState) {
case STARTING:
capturerObserver.onCapturerStarted(false /* success */);
// fall through
case RUNNING:
if (eventsHandler != null) {
eventsHandler.onCameraError(errorDescription);
}
break;
case STOPPING:
setCameraState(CameraState.IDLE);
Logging.e(TAG, "Closing camera failed: " + errorDescription);
return; // We don't want to call closeAndRelease in this case.
default:
throw new RuntimeException("Unknown camera state: " + cameraState);
}
closeAndRelease();
}
private void closeAndRelease() {
checkIsStrictlyOnCameraThread();
Logging.d(TAG, "Close and release.");
setCameraState(CameraState.STOPPING);
// Remove all pending Runnables posted from |this|.
cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
applicationContext = null;
capturerObserver = null;
if (cameraStatistics != null) {
cameraStatistics.release();
cameraStatistics = null;
}
if (surfaceTextureHelper != null) {
surfaceTextureHelper.stopListening();
surfaceTextureHelper = null;
}
if (captureSession != null) {
captureSession.close();
captureSession = null;
}
if (surface != null) {
surface.release();
surface = null;
}
if (cameraDevice != null) {
// Add a timeout for stopping the camera.
cameraThreadHandler.postAtTime(new Runnable() {
@Override
public void run() {
Logging.e(TAG, "Camera failed to stop within the timeout. Force stopping.");
setCameraState(CameraState.IDLE);
if (eventsHandler != null) {
eventsHandler.onCameraError("Camera failed to stop (timeout).");
}
}
}, STOP_TIMEOUT_RUNNABLE_TOKEN, SystemClock.uptimeMillis() + STOP_TIMEOUT);
cameraDevice.close();
cameraDevice = null;
} else {
Logging.w(TAG, "closeAndRelease called while cameraDevice is null");
setCameraState(CameraState.IDLE);
}
this.cameraStateCallback = null;
}
/**
* Sets the camera state while ensuring constraints are followed.
*/
private void setCameraState(CameraState newState) {
// State must only be modified on the camera thread. It can be edited from other threads
// if cameraState is IDLE since there is no camera thread.
checkIsOnCameraThread();
if (newState != CameraState.IDLE) {
if (cameraThreadHandler == null) {
throw new IllegalStateException(
"cameraThreadHandler must be null if and only if CameraState is IDLE.");
}
} else {
cameraThreadHandler = null;
}
switch (newState) {
case STARTING:
if (cameraState != CameraState.IDLE) {
throw new IllegalStateException("Only stopped camera can start.");
}
break;
case RUNNING:
if (cameraState != CameraState.STARTING) {
throw new IllegalStateException("Only starting camera can go to running state.");
}
break;
case STOPPING:
if (cameraState != CameraState.STARTING && cameraState != CameraState.RUNNING) {
throw new IllegalStateException("Only starting or running camera can stop.");
}
break;
case IDLE:
if (cameraState != CameraState.STOPPING) {
throw new IllegalStateException("Only stopping camera can go to idle state.");
}
break;
default:
throw new RuntimeException("Unknown camera state: " + newState);
}
synchronized (cameraStateLock) {
cameraState = newState;
cameraStateLock.notifyAll();
}
}
/**
* Internal method for opening the camera. Must be called on the camera thread.
*/
private void openCamera() {
try {
checkIsStrictlyOnCameraThread();
if (cameraState != CameraState.STARTING) {
throw new IllegalStateException("Camera should be in state STARTING in openCamera.");
}
if (cameraThreadHandler == null) {
throw new RuntimeException("Someone set cameraThreadHandler to null while the camera "
+ "state was STARTING. This should never happen");
}
// Camera is in state STARTING so cameraName will not be edited.
cameraManager.openCamera(cameraName, cameraStateCallback, cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to open camera: " + e);
}
}
private void startCaptureOnCameraThread(
final int requestedWidth, final int requestedHeight, final int requestedFramerate,
final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
final CapturerObserver capturerObserver) {
checkIsStrictlyOnCameraThread();
firstFrameReported = false;
consecutiveCameraOpenFailures = 0;
this.applicationContext = applicationContext;
this.capturerObserver = capturerObserver;
this.surfaceTextureHelper = surfaceTextureHelper;
this.cameraStateCallback = new CameraStateCallback();
synchronized (cameraStateLock) {
// Remember the requested format in case we want to switch cameras.
this.requestedWidth = requestedWidth;
this.requestedHeight = requestedHeight;
this.requestedFramerate = requestedFramerate;
}
final CameraCharacteristics cameraCharacteristics;
try {
// Camera is in state STARTING so cameraName will not be edited.
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraName);
} catch (CameraAccessException e) {
reportError("getCameraCharacteristics(): " + e.getMessage());
return;
}
List<CaptureFormat.FramerateRange> framerateRanges =
Camera2Enumerator.getSupportedFramerateRanges(cameraCharacteristics);
List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
if (framerateRanges.isEmpty() || sizes.isEmpty()) {
reportError("No supported capture formats.");
}
// Some LEGACY camera implementations use fps rates that are multiplied by 1000. Make sure
// all values are multiplied by 1000 for consistency.
this.fpsUnitFactor = (framerateRanges.get(0).max > 1000) ? 1 : 1000;
final CaptureFormat.FramerateRange bestFpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(
framerateRanges, requestedFramerate);
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(
sizes, requestedWidth, requestedHeight);
this.captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
Logging.d(TAG, "Using capture format: " + captureFormat);
Logging.d(TAG, "Opening camera " + cameraName);
if (eventsHandler != null) {
int cameraIndex = -1;
try {
cameraIndex = Integer.parseInt(cameraName);
} catch (NumberFormatException e) {
Logging.d(TAG, "External camera with non-int identifier: " + cameraName);
}
eventsHandler.onCameraOpening(cameraIndex);
}
openCamera();
}
/**
* Starts capture using specified settings. This is automatically called for you by
* VideoCapturerTrackSource if you are just using the camera as source for video track.
*/
@Override
public void startCapture(
final int requestedWidth, final int requestedHeight, final int requestedFramerate,
final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
final CapturerObserver capturerObserver) {
Logging.d(TAG, "startCapture requested: " + requestedWidth + "x" + requestedHeight
+ "@" + requestedFramerate);
if (surfaceTextureHelper == null) {
throw new IllegalArgumentException("surfaceTextureHelper not set.");
}
if (applicationContext == null) {
throw new IllegalArgumentException("applicationContext not set.");
}
if (capturerObserver == null) {
throw new IllegalArgumentException("capturerObserver not set.");
}
synchronized (cameraStateLock) {
waitForCameraToStopIfStopping();
if (cameraState != CameraState.IDLE) {
Logging.e(TAG, "Unexpected camera state for startCapture: " + cameraState);
return;
}
this.cameraThreadHandler = surfaceTextureHelper.getHandler();
setCameraState(CameraState.STARTING);
}
postOnCameraThread(new Runnable() {
@Override
public void run() {
startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate,
surfaceTextureHelper, applicationContext, capturerObserver);
}
});
}
final class CameraStateCallback extends CameraDevice.StateCallback {
private String getErrorDescription(int errorCode) {
switch (errorCode) {
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
return "Camera device has encountered a fatal error.";
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
return "Camera device could not be opened due to a device policy.";
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
return "Camera device is in use already.";
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
return "Camera service has encountered a fatal error.";
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
return "Camera device could not be opened because"
+ " there are too many other open camera devices.";
default:
return "Unknown camera error: " + errorCode;
}
}
@Override
public void onDisconnected(CameraDevice camera) {
checkIsStrictlyOnCameraThread();
cameraDevice = camera;
reportError("Camera disconnected.");
}
@Override
public void onError(CameraDevice camera, int errorCode) {
checkIsStrictlyOnCameraThread();
cameraDevice = camera;
if (cameraState == CameraState.STARTING && (
errorCode == CameraDevice.StateCallback.ERROR_CAMERA_IN_USE ||
errorCode == CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE)) {
consecutiveCameraOpenFailures++;
if (consecutiveCameraOpenFailures < MAX_OPEN_CAMERA_ATTEMPTS) {
Logging.w(TAG, "Opening camera failed, trying again: " + getErrorDescription(errorCode));
postDelayedOnCameraThread(OPEN_CAMERA_DELAY_MS, new Runnable() {
public void run() {
openCamera();
}
});
return;
} else {
Logging.e(TAG, "Opening camera failed too many times. Passing the error.");
}
}
reportError(getErrorDescription(errorCode));
}
@Override
public void onOpened(CameraDevice camera) {
checkIsStrictlyOnCameraThread();
Logging.d(TAG, "Camera opened.");
if (cameraState != CameraState.STARTING) {
throw new IllegalStateException("Unexpected state when camera opened: " + cameraState);
}
cameraDevice = camera;
final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
surface = new Surface(surfaceTexture);
try {
camera.createCaptureSession(
Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to create capture session. " + e);
}
}
@Override
public void onClosed(CameraDevice camera) {
checkIsStrictlyOnCameraThread();
Logging.d(TAG, "Camera device closed.");
if (cameraState != CameraState.STOPPING) {
Logging.e(TAG, "Camera state was not STOPPING in onClosed. Most likely camera didn't stop "
+ "within timelimit and this method was invoked twice.");
return;
}
cameraThreadHandler.removeCallbacksAndMessages(STOP_TIMEOUT_RUNNABLE_TOKEN);
setCameraState(CameraState.IDLE);
if (eventsHandler != null) {
eventsHandler.onCameraClosed();
}
}
}
final class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
@Override
public void onConfigureFailed(CameraCaptureSession session) {
checkIsStrictlyOnCameraThread();
captureSession = session;
reportError("Failed to configure capture session.");
}
@Override
public void onConfigured(CameraCaptureSession session) {
checkIsStrictlyOnCameraThread();
Logging.d(TAG, "Camera capture session configured.");
captureSession = session;
try {
/*
* The viable options for video capture requests are:
* TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
* post-processing.
* TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
* quality.
*/
final CaptureRequest.Builder captureRequestBuilder =
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// Set auto exposure fps range.
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(
captureFormat.framerate.min / fpsUnitFactor,
captureFormat.framerate.max / fpsUnitFactor));
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
captureRequestBuilder.addTarget(surface);
session.setRepeatingRequest(
captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to start capture request. " + e);
return;
}
Logging.d(TAG, "Camera device successfully started.");
surfaceTextureHelper.startListening(Camera2Capturer.this);
capturerObserver.onCapturerStarted(true /* success */);
cameraStatistics = new CameraStatistics(surfaceTextureHelper, eventsHandler);
setCameraState(CameraState.RUNNING);
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchDone(isFrontCamera);
switchEventsHandler = null;
pendingCameraSwitchSemaphore.release();
}
}
}
final class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
static final int MAX_CONSECUTIVE_CAMERA_CAPTURE_FAILURES = 10;
int consecutiveCameraCaptureFailures;
@Override
public void onCaptureFailed(
CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
checkIsOnCameraThread();
++consecutiveCameraCaptureFailures;
if (consecutiveCameraCaptureFailures > MAX_CONSECUTIVE_CAMERA_CAPTURE_FAILURES) {
reportError("Capture failed " + consecutiveCameraCaptureFailures + " consecutive times.");
}
}
@Override
public void onCaptureCompleted(
CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
// TODO(sakal): This sometimes gets called after camera has stopped, investigate
checkIsOnCameraThread();
consecutiveCameraCaptureFailures = 0;
}
}
// Switch camera to the next valid camera id. This can only be called while
// the camera is running.
@Override
public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
final String[] cameraIds;
try {
cameraIds = cameraManager.getCameraIdList();
} catch (CameraAccessException e) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("Could not get camera names: " + e);
}
return;
}
if (cameraIds.length < 2) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("No camera to switch to.");
}
return;
}
// Do not handle multiple camera switch requests, to avoid blocking the camera thread with too
// many queued switch requests. We have to be careful to always release this semaphore.
if (!pendingCameraSwitchSemaphore.tryAcquire()) {
Logging.w(TAG, "Ignoring camera switch request.");
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("Pending camera switch already in progress.");
}
return;
}
final String newCameraId;
final SurfaceTextureHelper surfaceTextureHelper;
final Context applicationContext;
final CapturerObserver capturerObserver;
final int requestedWidth;
final int requestedHeight;
final int requestedFramerate;
synchronized (cameraStateLock) {
waitForCameraToStartIfStarting();
if (cameraState != CameraState.RUNNING) {
Logging.e(TAG, "Calling swithCamera() on stopped camera.");
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("Camera is stopped.");
}
pendingCameraSwitchSemaphore.release();
return;
}
// Calculate new camera index and camera id. Camera is in state RUNNING so cameraName will
// not be edited.
final int currentCameraIndex = Arrays.asList(cameraIds).indexOf(cameraName);
if (currentCameraIndex == -1) {
Logging.e(TAG, "Couldn't find current camera id " + cameraName
+ " in list of camera ids: " + Arrays.toString(cameraIds));
}
final int newCameraIndex = (currentCameraIndex + 1) % cameraIds.length;
newCameraId = cameraIds[newCameraIndex];
// Remember parameters. These are not null since camera is in RUNNING state. They aren't
// edited either while camera is in RUNNING state.
surfaceTextureHelper = this.surfaceTextureHelper;
applicationContext = this.applicationContext;
capturerObserver = this.capturerObserver;
requestedWidth = this.requestedWidth;
requestedHeight = this.requestedHeight;
requestedFramerate = this.requestedFramerate;
this.switchEventsHandler = switchEventsHandler;
}
// Make the switch.
stopCapture();
setCameraName(newCameraId);
startCapture(requestedWidth, requestedHeight, requestedFramerate, surfaceTextureHelper,
applicationContext, capturerObserver);
// Note: switchEventsHandler will be called from onConfigured / reportError.
}
// Requests a new output format from the video capturer. Captured frames
// by the camera will be scaled/or dropped by the video capturer.
// It does not matter if width and height are flipped. I.e., |width| = 640, |height| = 480 produce
// the same result as |width| = 480, |height| = 640.
// TODO(magjed/perkj): Document what this function does. Change name?
@Override
public void onOutputFormatRequest(final int width, final int height, final int framerate) {
postOnCameraThread(new Runnable() {
@Override
public void run() {
if (capturerObserver == null) {
Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera.");
return;
}
Logging.d(TAG,
"onOutputFormatRequestOnCameraThread: " + width + "x" + height + "@" + framerate);
capturerObserver.onOutputFormatRequest(width, height, framerate);
}
});
}
// Reconfigure the camera to capture in a new format. This should only be called while the camera
// is running.
@Override
public void changeCaptureFormat(final int width, final int height, final int framerate) {
final SurfaceTextureHelper surfaceTextureHelper;
final Context applicationContext;
final CapturerObserver capturerObserver;
synchronized (cameraStateLock) {
waitForCameraToStartIfStarting();
if (cameraState != CameraState.RUNNING) {
Logging.e(TAG, "Calling changeCaptureFormat() on stopped camera.");
return;
}
requestedWidth = width;
requestedHeight = height;
requestedFramerate = framerate;
surfaceTextureHelper = this.surfaceTextureHelper;
applicationContext = this.applicationContext;
capturerObserver = this.capturerObserver;
}
// Make the switch.
stopCapture();
// TODO(magjed/sakal): Just recreate session.
startCapture(width, height, framerate,
surfaceTextureHelper, applicationContext, capturerObserver);
}
@Override
public List<CaptureFormat> getSupportedFormats() {
synchronized (cameraStateLock) {
return Camera2Enumerator.getSupportedFormats(this.cameraManager, cameraName);
}
}
@Override
public void dispose() {
synchronized (cameraStateLock) {
waitForCameraToStopIfStopping();
if (cameraState != CameraState.IDLE) {
throw new IllegalStateException("Unexpected camera state for dispose: " + cameraState);
}
}
}
// Blocks until camera is known to be stopped.
@Override
public void stopCapture() {
final CountDownLatch cameraStoppingLatch = new CountDownLatch(1);
Logging.d(TAG, "stopCapture");
checkNotOnCameraThread();
synchronized (cameraStateLock) {
waitForCameraToStartIfStarting();
if (cameraState != CameraState.RUNNING) {
Logging.w(TAG, "stopCapture called for already stopped camera.");
return;
}
postOnCameraThread(new Runnable() {
@Override
public void run() {
Logging.d(TAG, "stopCaptureOnCameraThread");
// Stop capture.
closeAndRelease();
cameraStoppingLatch.countDown();
}
});
}
// Wait for the stopping to start
ThreadUtils.awaitUninterruptibly(cameraStoppingLatch);
Logging.d(TAG, "stopCapture done");
}
private void postOnCameraThread(Runnable runnable) {
postDelayedOnCameraThread(0 /* delayMs */, runnable);
}
private void postDelayedOnCameraThread(int delayMs, Runnable runnable) {
synchronized (cameraStateLock) {
if ((cameraState != CameraState.STARTING && cameraState != CameraState.RUNNING)
|| !cameraThreadHandler.postAtTime(
runnable, this /* token */, SystemClock.uptimeMillis() + delayMs)) {
Logging.w(TAG, "Runnable not scheduled even though it was requested.");
}
}
}
private int getDeviceOrientation() {
int orientation = 0;
WindowManager wm = (WindowManager) applicationContext.getSystemService(
Context.WINDOW_SERVICE);
switch(wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;
case Surface.ROTATION_180:
orientation = 180;
break;
case Surface.ROTATION_270:
orientation = 270;
break;
case Surface.ROTATION_0:
default:
orientation = 0;
break;
}
return orientation;
}
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsStrictlyOnCameraThread();
if (eventsHandler != null && !firstFrameReported) {
eventsHandler.onFirstFrameAvailable();
firstFrameReported = true;
}
int rotation;
if (isFrontCamera) {
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
rotation = cameraOrientation + getDeviceOrientation();
transformMatrix =
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
} else {
rotation = cameraOrientation - getDeviceOrientation();
}
// Make sure |rotation| is between 0 and 360.
rotation = (360 + rotation % 360) % 360;
// Undo camera orientation - we report it as rotation instead.
transformMatrix = RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
cameraStatistics.addFrame();
capturerObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
transformMatrix, rotation, timestampNs);
}
}
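
A sketch of driving the asynchronous camera switch above from application code. As the class documents, only one switch may be pending at a time and the result callbacks can arrive on an arbitrary thread:

import org.webrtc.CameraVideoCapturer;

class SwitchCameraExample {
  // Requests a switch to the next camera id; at most one switch can be pending at a time.
  static void switchToNextCamera(CameraVideoCapturer capturer) {
    capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
      @Override
      public void onCameraSwitchDone(boolean isFrontCamera) {
        // The new camera is running; |isFrontCamera| reports its facing.
      }

      @Override
      public void onCameraSwitchError(String errorDescription) {
        // Switching failed, e.g. only one camera exists or a switch is already pending.
      }
    });
  }
}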

View File

@@ -1,208 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Build;
import android.os.SystemClock;
import android.util.Range;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@TargetApi(21)
public class Camera2Enumerator implements CameraEnumerator {
private final static String TAG = "Camera2Enumerator";
private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
// Each entry contains the supported formats for a given camera index. The formats are enumerated
// lazily in getSupportedFormats(), and cached for future reference.
private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
new HashMap<String, List<CaptureFormat>>();
final Context context;
final CameraManager cameraManager;
public Camera2Enumerator(Context context) {
this.context = context;
this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
}
@Override
public String[] getDeviceNames() {
try {
return cameraManager.getCameraIdList();
} catch (CameraAccessException e) {
Logging.e(TAG, "Camera access exception: " + e);
return new String[] {};
}
}
@Override
public boolean isFrontFacing(String deviceName) {
CameraCharacteristics characteristics
= getCameraCharacteristics(deviceName);
return characteristics != null
&& characteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_FRONT;
}
@Override
public boolean isBackFacing(String deviceName) {
CameraCharacteristics characteristics
= getCameraCharacteristics(deviceName);
return characteristics != null
&& characteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_BACK;
}
@Override
public CameraVideoCapturer createCapturer(String deviceName,
CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new Camera2Capturer(context, deviceName, eventsHandler);
}
private CameraCharacteristics getCameraCharacteristics(String deviceName) {
try {
return cameraManager.getCameraCharacteristics(deviceName);
} catch (CameraAccessException e) {
Logging.e(TAG, "Camera access exception: " + e);
return null;
}
}
public static boolean isSupported() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
}
static List<CaptureFormat.FramerateRange> getSupportedFramerateRanges(
CameraCharacteristics cameraCharacteristics) {
final Range<Integer>[] fpsRanges =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
if (fpsRanges == null) {
return new ArrayList<CaptureFormat.FramerateRange>();
}
int maxFps = 0;
for (Range<Integer> fpsRange : fpsRanges) {
maxFps = Math.max(maxFps, fpsRange.getUpper());
}
int unitFactor = maxFps < 1000 ? 1000 : 1;
return convertFramerates(fpsRanges, unitFactor);
}
static List<Size> getSupportedSizes(
CameraCharacteristics cameraCharacteristics) {
final StreamConfigurationMap streamMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
final android.util.Size[] sizes = streamMap.getOutputSizes(SurfaceTexture.class);
if (sizes == null) {
Logging.e(TAG, "No supported camera output sizes.");
return new ArrayList<Size>();
}
return convertSizes(sizes);
}
static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
return getSupportedFormats(
(CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
}
static List<CaptureFormat> getSupportedFormats(
CameraManager cameraManager, String cameraId) {
synchronized (cachedSupportedFormats) {
if (cachedSupportedFormats.containsKey(cameraId)) {
return cachedSupportedFormats.get(cameraId);
}
Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
final long startTimeMs = SystemClock.elapsedRealtime();
final CameraCharacteristics cameraCharacteristics;
try {
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
} catch (Exception ex) {
Logging.e(TAG, "getCameraCharacteristics(): " + ex);
return new ArrayList<CaptureFormat>();
}
final StreamConfigurationMap streamMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
List<CaptureFormat.FramerateRange> framerateRanges = getSupportedFramerateRanges(
cameraCharacteristics);
List<Size> sizes = getSupportedSizes(cameraCharacteristics);
int defaultMaxFps = 0;
for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
}
final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
for (Size size : sizes) {
long minFrameDurationNs = 0;
try {
minFrameDurationNs = streamMap.getOutputMinFrameDuration(SurfaceTexture.class,
new android.util.Size(size.width, size.height));
} catch (Exception e) {
// getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
}
final int maxFps = (minFrameDurationNs == 0)
? defaultMaxFps
: (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
}
cachedSupportedFormats.put(cameraId, formatList);
final long endTimeMs = SystemClock.elapsedRealtime();
Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
return formatList;
}
}
// Convert from android.util.Size to Size.
private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
final List<Size> sizes = new ArrayList<Size>();
for (android.util.Size size : cameraSizes) {
sizes.add(new Size(size.getWidth(), size.getHeight()));
}
return sizes;
}
// Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
private static List<CaptureFormat.FramerateRange> convertFramerates(
Range<Integer>[] arrayRanges, int unitFactor) {
final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
for (Range<Integer> range : arrayRanges) {
ranges.add(new CaptureFormat.FramerateRange(
range.getLower() * unitFactor,
range.getUpper() * unitFactor));
}
return ranges;
}
}
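
A small sketch showing how an application might choose between the two enumerators at runtime, using the isSupported() check above and falling back to Camera1Enumerator (from the earlier file) on pre-Lollipop devices:

import android.content.Context;

import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;

class EnumeratorSelectionExample {
  // Prefer the Camera2 API where it is available (Lollipop+), otherwise fall back to Camera1.
  static CameraEnumerator createEnumerator(Context applicationContext) {
    return Camera2Enumerator.isSupported()
        ? new Camera2Enumerator(applicationContext)
        : new Camera1Enumerator(true /* captureToTexture */);
  }
}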

View File

@@ -1,237 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static java.lang.Math.abs;
import android.graphics.ImageFormat;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
@SuppressWarnings("deprecation")
public class CameraEnumerationAndroid {
private final static String TAG = "CameraEnumerationAndroid";
public static class CaptureFormat {
// Class to represent a framerate range. The framerate varies because of lighting conditions.
// The values are multiplied by 1000, so 1000 represents one frame per second.
public static class FramerateRange {
public int min;
public int max;
public FramerateRange(int min, int max) {
this.min = min;
this.max = max;
}
@Override
public String toString() {
return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
}
@Override
public boolean equals(Object other) {
if (!(other instanceof FramerateRange)) {
return false;
}
final FramerateRange otherFramerate = (FramerateRange) other;
return min == otherFramerate.min && max == otherFramerate.max;
}
@Override
public int hashCode() {
// Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
return 1 + 65537 * min + max;
}
}
public final int width;
public final int height;
public final FramerateRange framerate;
// TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
// needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
// all imageFormats.
public final int imageFormat = ImageFormat.NV21;
public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
this.width = width;
this.height = height;
this.framerate = new FramerateRange(minFramerate, maxFramerate);
}
public CaptureFormat(int width, int height, FramerateRange framerate) {
this.width = width;
this.height = height;
this.framerate = framerate;
}
// Calculates the frame size of this capture format.
public int frameSize() {
return frameSize(width, height, imageFormat);
}
// Calculates the frame size of the specified image format. Currently only
// supporting ImageFormat.NV21.
// The size is width * height * number of bytes per pixel.
// http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
public static int frameSize(int width, int height, int imageFormat) {
if (imageFormat != ImageFormat.NV21) {
throw new UnsupportedOperationException("Don't know how to calculate "
+ "the frame size of non-NV21 image formats.");
}
return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
@Override
public String toString() {
return width + "x" + height + "@" + framerate;
}
@Override
public boolean equals(Object other) {
if (!(other instanceof CaptureFormat)) {
return false;
}
final CaptureFormat otherFormat = (CaptureFormat) other;
return width == otherFormat.width && height == otherFormat.height
&& framerate.equals(otherFormat.framerate);
}
@Override
public int hashCode() {
return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
}
}
/**
* @deprecated
* Please use Camera1Enumerator.getDeviceNames() instead.
*/
@Deprecated
public static String[] getDeviceNames() {
return new Camera1Enumerator().getDeviceNames();
}
/**
* @deprecated
* Please use Camera1Enumerator.getDeviceNames().length instead.
*/
@Deprecated
public static int getDeviceCount() {
return new Camera1Enumerator().getDeviceNames().length;
}
/**
* @deprecated
* Please use Camera1Enumerator.getDeviceNames()[index] instead.
*/
@Deprecated
public static String getDeviceName(int index) {
return new Camera1Enumerator().getDeviceName(index);
}
/**
* @deprecated
* Please use Camera1Enumerator.isFrontFacing(String deviceName) instead.
*/
@Deprecated
public static String getNameOfFrontFacingDevice() {
return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
}
/**
* @deprecated
* Please use Camera1Enumerator.isBackFacing(String deviceName) instead.
*/
@Deprecated
public static String getNameOfBackFacingDevice() {
return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
}
// Helper class for finding the closest supported format for the two functions below. It creates a
// comparator based on the difference to some requested parameters, where the element with the
// minimum difference is the element that is closest to the requested parameters.
private static abstract class ClosestComparator<T> implements Comparator<T> {
// Difference between supported and requested parameter.
abstract int diff(T supportedParameter);
@Override
public int compare(T t1, T t2) {
return diff(t1) - diff(t2);
}
}
// Prefer a fps range with an upper bound close to |framerate|. Also prefer a fps range with a low
// lower bound, to allow the framerate to fluctuate based on lighting conditions.
public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
return Collections.min(supportedFramerates,
new ClosestComparator<CaptureFormat.FramerateRange>() {
// Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
// from requested.
private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
// Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
private static final int MIN_FPS_THRESHOLD = 8000;
private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
// Use one weight for small |value| less than |threshold|, and another weight above.
private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
return (value < threshold)
? value * lowWeight
: threshold * lowWeight + (value - threshold) * highWeight;
}
@Override
int diff(CaptureFormat.FramerateRange range) {
final int minFpsError = progressivePenalty(range.min,
MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
return minFpsError + maxFpsError;
}
});
}
public static Size getClosestSupportedSize(
List<Size> supportedSizes, final int requestedWidth,
final int requestedHeight) {
return Collections.min(supportedSizes,
new ClosestComparator<Size>() {
@Override
int diff(Size size) {
return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
}
});
}
private static String getNameOfDevice(int facing) {
final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
try {
android.hardware.Camera.getCameraInfo(i, info);
if (info.facing == facing) {
return getDeviceName(i);
}
} catch (Exception e) {
Logging.e(TAG, "getCameraInfo() failed on index " + i, e);
}
}
return null;
}
}
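
A worked sketch of the closest-format helpers above. The framerate ranges are in 1/1000 fps units while the requested framerate is in plain fps, and the NV21 frame-size formula is width * height * 12 / 8; the concrete ranges and sizes below are made-up inputs for illustration:

import android.graphics.ImageFormat;

import java.util.Arrays;
import java.util.List;

import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.Size;

class ClosestFormatExample {
  static void pickFormat() {
    // Framerate ranges use 1/1000 fps units; the requested framerate below is in plain fps.
    List<CaptureFormat.FramerateRange> ranges = Arrays.asList(
        new CaptureFormat.FramerateRange(15000, 15000),
        new CaptureFormat.FramerateRange(7000, 30000));
    CaptureFormat.FramerateRange best =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(ranges, 30 /* fps */);
    // best is [7.0:30.0]: its upper bound matches 30 fps and its low lower bound is preferred.

    List<Size> sizes = Arrays.asList(new Size(640, 480), new Size(1280, 720));
    Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, 1280, 720);

    // NV21 frame size: 1280 * 720 * 12 bits / 8 = 1382400 bytes.
    int frameBytes = CaptureFormat.frameSize(1280, 720, ImageFormat.NV21);
  }
}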

View File

@@ -1,20 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public interface CameraEnumerator {
public String[] getDeviceNames();
public boolean isFrontFacing(String deviceName);
public boolean isBackFacing(String deviceName);
public CameraVideoCapturer createCapturer(String deviceName,
CameraVideoCapturer.CameraEventsHandler eventsHandler);
}

View File

@@ -1,128 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
* switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
* class for detecting camera freezes.
*/
public interface CameraVideoCapturer extends VideoCapturer {
/**
* Camera events handler - can be used to be notified about camera events. The callbacks are
* executed from an arbitrary thread.
*/
public interface CameraEventsHandler {
// Camera error handler - invoked when camera can not be opened
// or any camera exception happens on camera thread.
void onCameraError(String errorDescription);
// Invoked when camera stops receiving frames.
void onCameraFreezed(String errorDescription);
// Callback invoked when camera is opening.
void onCameraOpening(int cameraId);
// Callback invoked when first camera frame is available after camera is started.
void onFirstFrameAvailable();
// Callback invoked when camera is closed.
void onCameraClosed();
}
/**
* Camera switch handler - one of these functions are invoked with the result of switchCamera().
* The callback may be called on an arbitrary thread.
*/
public interface CameraSwitchHandler {
// Invoked on success. |isFrontCamera| is true if the new camera is front facing.
void onCameraSwitchDone(boolean isFrontCamera);
// Invoked on failure, e.g. camera is stopped or only one camera available.
void onCameraSwitchError(String errorDescription);
}
/**
* Switch camera to the next valid camera id. This can only be called while the camera is running.
* This function can be called from any thread.
*/
void switchCamera(CameraSwitchHandler switchEventsHandler);
/**
* Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
* on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
* thread.
*/
public static class CameraStatistics {
private final static String TAG = "CameraStatistics";
private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 4000;
private final SurfaceTextureHelper surfaceTextureHelper;
private final CameraEventsHandler eventsHandler;
private int frameCount;
private int freezePeriodCount;
// Camera observer - monitors camera framerate. Observer is executed on camera thread.
private final Runnable cameraObserver = new Runnable() {
@Override
public void run() {
final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
Logging.d(TAG, "Camera fps: " + cameraFps +".");
if (frameCount == 0) {
++freezePeriodCount;
if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS
&& eventsHandler != null) {
Logging.e(TAG, "Camera freezed.");
if (surfaceTextureHelper.isTextureInUse()) {
// This can only happen if we are capturing to textures.
eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
} else {
eventsHandler.onCameraFreezed("Camera failure.");
}
return;
}
} else {
freezePeriodCount = 0;
}
frameCount = 0;
surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
}
};
public CameraStatistics(
SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
if (surfaceTextureHelper == null) {
throw new IllegalArgumentException("SurfaceTextureHelper is null");
}
this.surfaceTextureHelper = surfaceTextureHelper;
this.eventsHandler = eventsHandler;
this.frameCount = 0;
this.freezePeriodCount = 0;
surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
}
private void checkThread() {
if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
public void addFrame() {
checkThread();
++frameCount;
}
public void release() {
checkThread();
surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
}
}
}
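A hedged usage sketch (not part of the reverted files) for the switchCamera() call documented above; |capturer| is assumed to be a running CameraVideoCapturer and the tag string is illustrative.

capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
  @Override
  public void onCameraSwitchDone(boolean isFrontCamera) {
    Logging.d("SwitchExample", "Now using front camera: " + isFrontCamera);
  }
  @Override
  public void onCameraSwitchError(String errorDescription) {
    Logging.e("SwitchExample", "Camera switch failed: " + errorDescription);
  }
});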

View File

@ -1,126 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/** Java wrapper for a C++ DataChannelInterface. */
public class DataChannel {
/** Java wrapper for WebIDL RTCDataChannel. */
public static class Init {
public boolean ordered = true;
// Optional unsigned short in WebIDL, -1 means unspecified.
public int maxRetransmitTimeMs = -1;
// Optional unsigned short in WebIDL, -1 means unspecified.
public int maxRetransmits = -1;
public String protocol = "";
public boolean negotiated = false;
// Optional unsigned short in WebIDL, -1 means unspecified.
public int id = -1;
public Init() {}
// Called only by native code.
private Init(
boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
String protocol, boolean negotiated, int id) {
this.ordered = ordered;
this.maxRetransmitTimeMs = maxRetransmitTimeMs;
this.maxRetransmits = maxRetransmits;
this.protocol = protocol;
this.negotiated = negotiated;
this.id = id;
}
}
/** Java version of C++ DataBuffer. The atom of data in a DataChannel. */
public static class Buffer {
/** The underlying data. */
public final ByteBuffer data;
/**
* Indicates whether |data| contains UTF-8 text or "binary data"
* (i.e. anything else).
*/
public final boolean binary;
public Buffer(ByteBuffer data, boolean binary) {
this.data = data;
this.binary = binary;
}
}
/** Java version of C++ DataChannelObserver. */
public interface Observer {
/** The data channel's bufferedAmount has changed. */
public void onBufferedAmountChange(long previousAmount);
/** The data channel state has changed. */
public void onStateChange();
/**
* A data buffer was successfully received. NOTE: |buffer.data| will be
* freed once this function returns so callers who want to use the data
* asynchronously must make sure to copy it first.
*/
public void onMessage(Buffer buffer);
}
/** Keep in sync with DataChannelInterface::DataState. */
public enum State { CONNECTING, OPEN, CLOSING, CLOSED };
private final long nativeDataChannel;
private long nativeObserver;
public DataChannel(long nativeDataChannel) {
this.nativeDataChannel = nativeDataChannel;
}
/** Register |observer|, replacing any previously-registered observer. */
public void registerObserver(Observer observer) {
if (nativeObserver != 0) {
unregisterObserverNative(nativeObserver);
}
nativeObserver = registerObserverNative(observer);
}
private native long registerObserverNative(Observer observer);
/** Unregister the (only) observer. */
public void unregisterObserver() {
unregisterObserverNative(nativeObserver);
}
private native void unregisterObserverNative(long nativeObserver);
public native String label();
public native State state();
/**
* Return the number of bytes of application data (UTF-8 text and binary data)
* that have been queued using SendBuffer but have not yet been transmitted
* to the network.
*/
public native long bufferedAmount();
/** Close the channel. */
public native void close();
/** Send |data| to the remote peer; return success. */
public boolean send(Buffer buffer) {
// TODO(fischman): this could be cleverer about avoiding copies if the
// ByteBuffer is direct and/or is backed by an array.
byte[] data = new byte[buffer.data.remaining()];
buffer.data.get(data);
return sendNative(data, buffer.binary);
}
private native boolean sendNative(byte[] data, boolean binary);
/** Dispose of native resources attached to this channel. */
public native void dispose();
};
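A minimal sketch (not part of the reverted files) of sending a UTF-8 text message through the DataChannel wrapper above; the channel is assumed to have been created elsewhere and to be in the OPEN state.

import java.nio.ByteBuffer;
import java.nio.charset.Charset;

// Wrap the text in a DataChannel.Buffer with binary == false and send it.
static boolean sendText(DataChannel channel, String text) {
  ByteBuffer payload = ByteBuffer.wrap(text.getBytes(Charset.forName("UTF-8")));
  return channel.send(new DataChannel.Buffer(payload, false /* binary */));
}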

View File

@ -1,128 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import javax.microedition.khronos.egl.EGL10;
/**
* Holds EGL state and utility methods for handling an EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
public abstract class EglBase {
// EGL wrapper for an actual EGLContext.
public static class Context {
}
// According to the documentation, EGL can be used from multiple threads at the same time if each
// thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
// Therefore, synchronize on this global lock before calling dangerous EGL functions that might
// deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
public static final Object lock = new Object();
// These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
// https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
// This is similar to how GlSurfaceView does:
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
private static final int EGL_OPENGL_ES2_BIT = 4;
// Android-specific extension.
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
public static final int[] CONFIG_PLAIN = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public static final int[] CONFIG_RGBA = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public static final int[] CONFIG_PIXEL_BUFFER = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
EGL10.EGL_NONE
};
public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
EGL10.EGL_NONE
};
public static final int[] CONFIG_RECORDABLE = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
EGL10.EGL_NONE
};
// Create a new context with the specified config attributes, sharing data with sharedContext.
// |sharedContext| can be null.
public static EglBase create(Context sharedContext, int[] configAttributes) {
return (EglBase14.isEGL14Supported()
&& (sharedContext == null || sharedContext instanceof EglBase14.Context))
? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
: new EglBase10((EglBase10.Context) sharedContext, configAttributes);
}
public static EglBase create() {
return create(null, CONFIG_PLAIN);
}
public static EglBase create(Context sharedContext) {
return create(sharedContext, CONFIG_PLAIN);
}
public abstract void createSurface(Surface surface);
// Create EGLSurface from the Android SurfaceTexture.
public abstract void createSurface(SurfaceTexture surfaceTexture);
// Create dummy 1x1 pixel buffer surface so the context can be made current.
public abstract void createDummyPbufferSurface();
public abstract void createPbufferSurface(int width, int height);
public abstract Context getEglBaseContext();
public abstract boolean hasSurface();
public abstract int surfaceWidth();
public abstract int surfaceHeight();
public abstract void releaseSurface();
public abstract void release();
public abstract void makeCurrent();
// Detach the current EGL context, so that it can be made current on another thread.
public abstract void detachCurrent();
public abstract void swapBuffers();
}
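The lifecycle implied by the abstract methods above, as a short sketch (illustrative only): create a context, give it a surface so it can be made current, then release everything.

EglBase eglBase = EglBase.create();   // CONFIG_PLAIN, no shared context
eglBase.createDummyPbufferSurface();  // 1x1 pbuffer so makeCurrent() has a surface
eglBase.makeCurrent();
// ... issue GLES calls on this thread ...
eglBase.release();                    // destroys the surface and the context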

View File

@ -1,301 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Canvas;
import android.graphics.SurfaceTexture;
import android.graphics.Rect;
import android.view.Surface;
import android.view.SurfaceHolder;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
/**
* Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
final class EglBase10 extends EglBase {
// This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private final EGL10 egl;
private EGLContext eglContext;
private EGLConfig eglConfig;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
// EGL wrapper for an actual EGLContext.
public static class Context extends EglBase.Context {
private final EGLContext eglContext;
public Context(EGLContext eglContext) {
this.eglContext = eglContext;
}
}
// Create a new context with the specified config type, sharing data with sharedContext.
EglBase10(Context sharedContext, int[] configAttributes) {
this.egl = (EGL10) EGLContext.getEGL();
eglDisplay = getEglDisplay();
eglConfig = getEglConfig(eglDisplay, configAttributes);
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
}
@Override
public void createSurface(Surface surface) {
/**
* We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
* couldn't actually take a Surface object until API 17. Older versions fortunately just call
* SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
*/
class FakeSurfaceHolder implements SurfaceHolder {
private final Surface surface;
FakeSurfaceHolder(Surface surface) {
this.surface = surface;
}
@Override
public void addCallback(Callback callback) {}
@Override
public void removeCallback(Callback callback) {}
@Override
public boolean isCreating() {
return false;
}
@Deprecated
@Override
public void setType(int i) {}
@Override
public void setFixedSize(int i, int i2) {}
@Override
public void setSizeFromLayout() {}
@Override
public void setFormat(int i) {}
@Override
public void setKeepScreenOn(boolean b) {}
@Override
public Canvas lockCanvas() {
return null;
}
@Override
public Canvas lockCanvas(Rect rect) {
return null;
}
@Override
public void unlockCanvasAndPost(Canvas canvas) {}
@Override
public Rect getSurfaceFrame() {
return null;
}
@Override
public Surface getSurface() {
return surface;
}
}
createSurfaceInternal(new FakeSurfaceHolder(surface));
}
// Create EGLSurface from the Android SurfaceTexture.
@Override
public void createSurface(SurfaceTexture surfaceTexture) {
createSurfaceInternal(surfaceTexture);
}
// Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
private void createSurfaceInternal(Object nativeWindow) {
if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
}
checkIsNotReleased();
if (eglSurface != EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL10.EGL_NONE};
eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Failed to create window surface");
}
}
// Create dummy 1x1 pixel buffer surface so the context can be made current.
@Override
public void createDummyPbufferSurface() {
createPbufferSurface(1, 1);
}
@Override
public void createPbufferSurface(int width, int height) {
checkIsNotReleased();
if (eglSurface != EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException(
"Failed to create pixel buffer surface with size: " + width + "x" + height);
}
}
@Override
public org.webrtc.EglBase.Context getEglBaseContext() {
return new EglBase10.Context(eglContext);
}
@Override
public boolean hasSurface() {
return eglSurface != EGL10.EGL_NO_SURFACE;
}
@Override
public int surfaceWidth() {
final int widthArray[] = new int[1];
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
return widthArray[0];
}
@Override
public int surfaceHeight() {
final int heightArray[] = new int[1];
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
return heightArray[0];
}
@Override
public void releaseSurface() {
if (eglSurface != EGL10.EGL_NO_SURFACE) {
egl.eglDestroySurface(eglDisplay, eglSurface);
eglSurface = EGL10.EGL_NO_SURFACE;
}
}
private void checkIsNotReleased() {
if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
|| eglConfig == null) {
throw new RuntimeException("This object has been released");
}
}
@Override
public void release() {
checkIsNotReleased();
releaseSurface();
detachCurrent();
egl.eglDestroyContext(eglDisplay, eglContext);
egl.eglTerminate(eglDisplay);
eglContext = EGL10.EGL_NO_CONTEXT;
eglDisplay = EGL10.EGL_NO_DISPLAY;
eglConfig = null;
}
@Override
public void makeCurrent() {
checkIsNotReleased();
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't make current");
}
synchronized (EglBase.lock) {
if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
}
// Detach the current EGL context, so that it can be made current on another thread.
@Override
public void detachCurrent() {
synchronized (EglBase.lock) {
if (!egl.eglMakeCurrent(
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
throw new RuntimeException("eglDetachCurrent failed");
}
}
}
@Override
public void swapBuffers() {
checkIsNotReleased();
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
egl.eglSwapBuffers(eglDisplay, eglSurface);
}
}
// Return an EGLDisplay, or die trying.
private EGLDisplay getEglDisplay() {
EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("Unable to get EGL10 display");
}
int[] version = new int[2];
if (!egl.eglInitialize(eglDisplay, version)) {
throw new RuntimeException("Unable to initialize EGL10");
}
return eglDisplay;
}
// Return an EGLConfig, or die trying.
private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!egl.eglChooseConfig(
eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
throw new RuntimeException("eglChooseConfig failed");
}
if (numConfigs[0] <= 0) {
throw new RuntimeException("Unable to find any matching EGL config");
}
final EGLConfig eglConfig = configs[0];
if (eglConfig == null) {
throw new RuntimeException("eglChooseConfig returned null");
}
return eglConfig;
}
// Return an EGLContext, or die trying.
private EGLContext createEglContext(
Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
if (sharedContext != null && sharedContext.eglContext == EGL10.EGL_NO_CONTEXT) {
throw new RuntimeException("Invalid sharedContext");
}
int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
EGLContext rootContext =
sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext.eglContext;
final EGLContext eglContext;
synchronized (EglBase.lock) {
eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
}
if (eglContext == EGL10.EGL_NO_CONTEXT) {
throw new RuntimeException("Failed to create EGL context");
}
return eglContext;
}
}

View File

@ -1,258 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;
import org.webrtc.Logging;
/**
* Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
@TargetApi(18)
public final class EglBase14 extends EglBase {
private static final String TAG = "EglBase14";
private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
private EGLContext eglContext;
private EGLConfig eglConfig;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
// EGL 1.4 is supported from API 17, but EGLExt, which is used for setting the presentation
// time stamp on a surface, is only supported from API 18, so we require 18.
public static boolean isEGL14Supported() {
Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
+ ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
}
public static class Context extends EglBase.Context {
private final android.opengl.EGLContext egl14Context;
public Context(android.opengl.EGLContext eglContext) {
this.egl14Context = eglContext;
}
}
// Create a new context with the specified config type, sharing data with sharedContext.
// |sharedContext| may be null.
public EglBase14(EglBase14.Context sharedContext, int[] configAttributes) {
eglDisplay = getEglDisplay();
eglConfig = getEglConfig(eglDisplay, configAttributes);
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
}
// Create EGLSurface from the Android Surface.
@Override
public void createSurface(Surface surface) {
createSurfaceInternal(surface);
}
// Create EGLSurface from the Android SurfaceTexture.
@Override
public void createSurface(SurfaceTexture surfaceTexture) {
createSurfaceInternal(surfaceTexture);
}
// Create EGLSurface from either Surface or SurfaceTexture.
private void createSurfaceInternal(Object surface) {
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
}
checkIsNotReleased();
if (eglSurface != EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL14.EGL_NONE};
eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Failed to create window surface");
}
}
@Override
public void createDummyPbufferSurface() {
createPbufferSurface(1, 1);
}
@Override
public void createPbufferSurface(int width, int height) {
checkIsNotReleased();
if (eglSurface != EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException(
"Failed to create pixel buffer surface with size: " + width + "x" + height);
}
}
@Override
public Context getEglBaseContext() {
return new EglBase14.Context(eglContext);
}
@Override
public boolean hasSurface() {
return eglSurface != EGL14.EGL_NO_SURFACE;
}
@Override
public int surfaceWidth() {
final int widthArray[] = new int[1];
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
return widthArray[0];
}
@Override
public int surfaceHeight() {
final int heightArray[] = new int[1];
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
return heightArray[0];
}
@Override
public void releaseSurface() {
if (eglSurface != EGL14.EGL_NO_SURFACE) {
EGL14.eglDestroySurface(eglDisplay, eglSurface);
eglSurface = EGL14.EGL_NO_SURFACE;
}
}
private void checkIsNotReleased() {
if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
|| eglConfig == null) {
throw new RuntimeException("This object has been released");
}
}
@Override
public void release() {
checkIsNotReleased();
releaseSurface();
detachCurrent();
EGL14.eglDestroyContext(eglDisplay, eglContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(eglDisplay);
eglContext = EGL14.EGL_NO_CONTEXT;
eglDisplay = EGL14.EGL_NO_DISPLAY;
eglConfig = null;
}
@Override
public void makeCurrent() {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't make current");
}
synchronized (EglBase.lock) {
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
}
// Detach the current EGL context, so that it can be made current on another thread.
@Override
public void detachCurrent() {
synchronized (EglBase.lock) {
if (!EGL14.eglMakeCurrent(
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
throw new RuntimeException("eglDetachCurrent failed");
}
}
}
@Override
public void swapBuffers() {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
}
}
public void swapBuffers(long timeStampNs) {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
// See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
}
}
// Return an EGLDisplay, or die trying.
private static EGLDisplay getEglDisplay() {
EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("Unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
throw new RuntimeException("Unable to initialize EGL14");
}
return eglDisplay;
}
// Return an EGLConfig, or die trying.
private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(
eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
throw new RuntimeException("eglChooseConfig failed");
}
if (numConfigs[0] <= 0) {
throw new RuntimeException("Unable to find any matching EGL config");
}
final EGLConfig eglConfig = configs[0];
if (eglConfig == null) {
throw new RuntimeException("eglChooseConfig returned null");
}
return eglConfig;
}
// Return an EGLContext, or die trying.
private static EGLContext createEglContext(
EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
if (sharedContext != null && sharedContext.egl14Context == EGL14.EGL_NO_CONTEXT) {
throw new RuntimeException("Invalid sharedContext");
}
int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
EGLContext rootContext =
sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext.egl14Context;
final EGLContext eglContext;
synchronized (EglBase.lock) {
eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
}
if (eglContext == EGL14.EGL_NO_CONTEXT) {
throw new RuntimeException("Failed to create EGL context");
}
return eglContext;
}
}
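A short sketch (not from the original files) of the EGL14-only swapBuffers(long) overload above, which stamps a presentation time on the frame before swapping; the window surface (e.g. a MediaCodec encoder input surface) and the capture timestamp are assumed to come from the caller.

void renderWithTimestamp(EglBase14 eglBase14, long captureTimeNs) {
  eglBase14.makeCurrent();
  // ... draw the frame with GLES here ...
  eglBase14.swapBuffers(captureTimeNs);  // eglPresentationTimeANDROID + eglSwapBuffers
}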

View File

@ -1,216 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import org.webrtc.GlShader;
import org.webrtc.GlUtil;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.Arrays;
import java.util.IdentityHashMap;
import java.util.Map;
/**
* Helper class to draw an opaque quad on the target viewport location. Rotation, mirroring, and
* cropping are specified using a 4x4 texture coordinate transform matrix. The frame input can
* either be an OES texture or YUV textures in I420 format. The GL state is intentionally preserved
* between draw calls to maximize performance. The function release() must be called manually to
* free the resources held by this object.
*/
public class GlRectDrawer implements RendererCommon.GlDrawer {
// Simple vertex shader, used for both YUV and OES.
private static final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n"
+ "attribute vec4 in_pos;\n"
+ "attribute vec4 in_tc;\n"
+ "\n"
+ "uniform mat4 texMatrix;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = in_pos;\n"
+ " interp_tc = (texMatrix * in_tc).xy;\n"
+ "}\n";
private static final String YUV_FRAGMENT_SHADER_STRING =
"precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform sampler2D y_tex;\n"
+ "uniform sampler2D u_tex;\n"
+ "uniform sampler2D v_tex;\n"
+ "\n"
+ "void main() {\n"
// CSC according to http://www.fourcc.org/fccyvrgb.php
+ " float y = texture2D(y_tex, interp_tc).r;\n"
+ " float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
+ " float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
+ " gl_FragColor = vec4(y + 1.403 * v, "
+ " y - 0.344 * u - 0.714 * v, "
+ " y + 1.77 * u, 1);\n"
+ "}\n";
private static final String RGB_FRAGMENT_SHADER_STRING =
"precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform sampler2D rgb_tex;\n"
+ "\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(rgb_tex, interp_tc);\n"
+ "}\n";
private static final String OES_FRAGMENT_SHADER_STRING =
"#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform samplerExternalOES oes_tex;\n"
+ "\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(oes_tex, interp_tc);\n"
+ "}\n";
// Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
// top-right.
private static final FloatBuffer FULL_RECTANGLE_BUF =
GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
});
private static class Shader {
public final GlShader glShader;
public final int texMatrixLocation;
public Shader(String fragmentShader) {
this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
}
}
// The keys are the fragment shader strings defined above.
private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
/**
* Draw an OES texture frame with specified texture transformation matrix. Required resources are
* allocated at the first call to this function.
*/
@Override
public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// updateTexImage() may be called from another thread in another EGL context, so we need to
// bind/unbind the texture in each draw call so that GLES understands it's a new texture.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
}
/**
* Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
* are allocated at the first call to this function.
*/
@Override
public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
// Unbind the texture as a precaution.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
/**
* Draw a YUV frame with specified texture transformation matrix. Required resources are
* allocated at the first call to this function.
*/
@Override
public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
// Bind the textures.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
}
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
// Unbind the textures as a precaution.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
}
private void drawRectangle(int x, int y, int width, int height) {
// Draw quad.
GLES20.glViewport(x, y, width, height);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
private void prepareShader(String fragmentShader, float[] texMatrix) {
final Shader shader;
if (shaders.containsKey(fragmentShader)) {
shader = shaders.get(fragmentShader);
} else {
// Lazy allocation.
shader = new Shader(fragmentShader);
shaders.put(fragmentShader, shader);
shader.glShader.useProgram();
// Initialize fragment shader uniform values.
if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
} else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
} else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
} else {
throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
}
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
// Initialize vertex shader attributes.
shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
}
shader.glShader.useProgram();
// Copy the texture transformation matrix over.
GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
}
/**
* Release all GLES resources. This needs to be done manually; otherwise the resources are leaked.
*/
@Override
public void release() {
for (Shader shader : shaders.values()) {
shader.glShader.release();
}
shaders.clear();
}
}
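As an illustration of the drawOes() entry point above (not part of the reverted files): drawing an external OES texture over the full viewport with an identity texture transform. The texture id and frame size are assumed inputs, and a current EGL context is required.

static void drawFullViewportOes(GlRectDrawer drawer, int oesTextureId, int width, int height) {
  // Identity 4x4 matrix: no rotation, mirroring or cropping.
  final float[] identityMatrix = new float[] {
      1, 0, 0, 0,
      0, 1, 0, 0,
      0, 0, 1, 0,
      0, 0, 0, 1};
  drawer.drawOes(oesTextureId, identityMatrix, width, height, 0, 0, width, height);
}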

View File

@ -1,128 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES20;
import org.webrtc.Logging;
import java.nio.FloatBuffer;
// Helper class for handling OpenGL shaders and shader programs.
public class GlShader {
private static final String TAG = "GlShader";
private static int compileShader(int shaderType, String source) {
final int shader = GLES20.glCreateShader(shaderType);
if (shader == 0) {
throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
}
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compileStatus = new int[] {
GLES20.GL_FALSE
};
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] != GLES20.GL_TRUE) {
Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
GLES20.glGetShaderInfoLog(shader));
throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
}
GlUtil.checkNoGLES2Error("compileShader");
return shader;
}
private int program;
public GlShader(String vertexSource, String fragmentSource) {
final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
program = GLES20.glCreateProgram();
if (program == 0) {
throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
}
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
int[] linkStatus = new int[] {
GLES20.GL_FALSE
};
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Logging.e(TAG, "Could not link program: " +
GLES20.glGetProgramInfoLog(program));
throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
}
// According to the documentation of glLinkProgram():
// "After the link operation, applications are free to modify attached shader objects, compile
// attached shader objects, detach shader objects, delete shader objects, and attach additional
// shader objects. None of these operations affects the information log or the program that is
// part of the program object."
// But in practice, detaching shaders from the program seems to break some devices. Deleting the
// shaders is fine, however - they will be deleted once they are no longer attached to a program.
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
GlUtil.checkNoGLES2Error("Creating GlShader");
}
public int getAttribLocation(String label) {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
int location = GLES20.glGetAttribLocation(program, label);
if (location < 0) {
throw new RuntimeException("Could not locate '" + label + "' in program");
}
return location;
}
/**
* Enable and upload a vertex array for attribute |label|. The vertex data is specified in
* |buffer| with |dimension| number of components per vertex.
*/
public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
int location = getAttribLocation(label);
GLES20.glEnableVertexAttribArray(location);
GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, 0, buffer);
GlUtil.checkNoGLES2Error("setVertexAttribArray");
}
public int getUniformLocation(String label) {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
int location = GLES20.glGetUniformLocation(program, label);
if (location < 0) {
throw new RuntimeException("Could not locate uniform '" + label + "' in program");
}
return location;
}
public void useProgram() {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
GLES20.glUseProgram(program);
GlUtil.checkNoGLES2Error("glUseProgram");
}
public void release() {
Logging.d(TAG, "Deleting shader.");
// Delete program, automatically detaching any shaders from it.
if (program != -1) {
GLES20.glDeleteProgram(program);
program = -1;
}
}
}
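A minimal sketch (illustrative only) of compiling and using a program with the GlShader helper above; the shader sources are trivial placeholders and an EGL context is assumed to be current on the calling thread.

static void drawSolidRedQuad() {
  final String vertexSource =
      "attribute vec4 in_pos;\n"
      + "void main() { gl_Position = in_pos; }\n";
  final String fragmentSource =
      "precision mediump float;\n"
      + "void main() { gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); }\n";
  GlShader shader = new GlShader(vertexSource, fragmentSource);
  shader.useProgram();
  shader.setVertexAttribArray("in_pos", 2, GlUtil.createFloatBuffer(
      new float[] {-1, -1, 1, -1, -1, 1, 1, 1}));
  GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
  shader.release();
}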

View File

@ -1,125 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES20;
/**
* Helper class for handling an OpenGL framebuffer with only a color attachment and no depth or
* stencil buffer. Intended for simple tasks such as texture copy, texture downscaling, and
* texture color conversion.
*/
// TODO(magjed): Add unittests for this class.
public class GlTextureFrameBuffer {
private final int frameBufferId;
private final int textureId;
private final int pixelFormat;
private int width;
private int height;
/**
* Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
* when calling this function. The framebuffer is not complete until setSize() is called.
*/
public GlTextureFrameBuffer(int pixelFormat) {
switch (pixelFormat) {
case GLES20.GL_LUMINANCE:
case GLES20.GL_RGB:
case GLES20.GL_RGBA:
this.pixelFormat = pixelFormat;
break;
default:
throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
}
textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
this.width = 0;
this.height = 0;
// Create framebuffer object and bind it.
final int frameBuffers[] = new int[1];
GLES20.glGenFramebuffers(1, frameBuffers, 0);
frameBufferId = frameBuffers[0];
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
GlUtil.checkNoGLES2Error("Generate framebuffer");
// Attach the texture to the framebuffer as color attachment.
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D, textureId, 0);
GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
/**
* (Re)allocate texture. Will do nothing if the requested size equals the current size. An
* EGLContext must be bound on the current thread when calling this function. Must be called at
* least once before using the framebuffer. May be called multiple times to change size.
*/
public void setSize(int width, int height) {
if (width == 0 || height == 0) {
throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
}
if (width == this.width && height == this.height) {
return;
}
this.width = width;
this.height = height;
// Bind our framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
GlUtil.checkNoGLES2Error("glBindFramebuffer");
// Allocate texture.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
GLES20.GL_UNSIGNED_BYTE, null);
// Check that the framebuffer is in a good state.
final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
throw new IllegalStateException("Framebuffer not complete, status: " + status);
}
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
public int getWidth() {
return width;
}
public int getHeight() {
return height;
}
public int getFrameBufferId() {
return frameBufferId;
}
public int getTextureId() {
return textureId;
}
/**
* Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
* this function. This object should not be used after this call.
*/
public void release() {
GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
width = 0;
height = 0;
}
}
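A short sketch (not part of the original files) of rendering into an offscreen texture with the class above; a current EGL context is assumed and the drawing step is elided.

GlTextureFrameBuffer offscreenBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
offscreenBuffer.setSize(640, 480);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, offscreenBuffer.getFrameBufferId());
// ... draw into the attached texture here, e.g. with a GlRectDrawer ...
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
// offscreenBuffer.getTextureId() now contains the rendered pixels.
offscreenBuffer.release();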

View File

@ -1,58 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* Some OpenGL static utility functions.
*/
public class GlUtil {
private GlUtil() {}
// Assert that no OpenGL ES 2.0 error has been raised.
public static void checkNoGLES2Error(String msg) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
throw new RuntimeException(msg + ": GLES20 error: " + error);
}
}
public static FloatBuffer createFloatBuffer(float[] coords) {
// Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
bb.order(ByteOrder.nativeOrder());
FloatBuffer fb = bb.asFloatBuffer();
fb.put(coords);
fb.position(0);
return fb;
}
/**
* Generate texture with standard parameters.
*/
public static int generateTexture(int target) {
final int textureArray[] = new int[1];
GLES20.glGenTextures(1, textureArray, 0);
final int textureId = textureArray[0];
GLES20.glBindTexture(target, textureId);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
checkNoGLES2Error("generateTexture");
return textureId;
}
}

View File

@ -1,31 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Representation of a single ICE Candidate, mirroring
* {@code IceCandidateInterface} in the C++ API.
*/
public class IceCandidate {
public final String sdpMid;
public final int sdpMLineIndex;
public final String sdp;
public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
this.sdpMid = sdpMid;
this.sdpMLineIndex = sdpMLineIndex;
this.sdp = sdp;
}
public String toString() {
return sdpMid + ":" + sdpMLineIndex + ":" + sdp;
}
}

View File

@ -1,714 +0,0 @@
/*
* Copyright 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Build;
import android.os.SystemClock;
import android.view.Surface;
import org.webrtc.Logging;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.Queue;
import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
// This class is an implementation detail of the Java PeerConnection API.
@SuppressWarnings("deprecation")
public class MediaCodecVideoDecoder {
// This class is constructed, operated, and destroyed by its C++ incarnation,
// so the class and its methods have non-public visibility. The API this
// class exposes aims to mimic the webrtc::VideoDecoder API as closely as
// possible to minimize the amount of translation work necessary.
private static final String TAG = "MediaCodecVideoDecoder";
private static final long MAX_DECODE_TIME_MS = 200;
// Tracks webrtc::VideoCodecType.
public enum VideoCodecType {
VIDEO_CODEC_VP8,
VIDEO_CODEC_VP9,
VIDEO_CODEC_H264
}
// Timeout for input buffer dequeue.
private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
// Timeout for codec releasing.
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
// Max number of output buffers queued before starting to drop decoded frames.
private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
// Active running decoder instance. Set in initDecode() (called from native code)
// and reset to null in release() call.
private static MediaCodecVideoDecoder runningInstance = null;
private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
private static int codecErrors = 0;
// List of disabled codec types - can be set from application.
private static Set<String> hwDecoderDisabledTypes = new HashSet<String>();
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] inputBuffers;
private ByteBuffer[] outputBuffers;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc";
// List of supported HW VP8 decoders.
private static final String[] supportedVp8HwCodecPrefixes =
{"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
// List of supported HW VP9 decoders.
private static final String[] supportedVp9HwCodecPrefixes =
{"OMX.qcom.", "OMX.Exynos." };
// List of supported HW H.264 decoders.
private static final String[] supportedH264HwCodecPrefixes =
{"OMX.qcom.", "OMX.Intel.", "OMX.Exynos." };
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
private static final int
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Allowable color formats supported by codec - in order of preference.
private static final List<Integer> supportedColorList = Arrays.asList(
CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
private int colorFormat;
private int width;
private int height;
private int stride;
private int sliceHeight;
private boolean hasDecodedFirstFrame;
private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>();
private boolean useSurface;
// The below variables are only used when decoding to a Surface.
private TextureListener textureListener;
private int droppedFrames;
private Surface surface = null;
private final Queue<DecodedOutputBuffer>
dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
// MediaCodec error handler - invoked when a critical error happens which may prevent
// further use of the MediaCodec API. Currently it means that one of the MediaCodec instances
// is hanging and can no longer be used.
public static interface MediaCodecVideoDecoderErrorCallback {
void onMediaCodecVideoDecoderCriticalError(int codecErrors);
}
public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
Logging.d(TAG, "Set error callback");
MediaCodecVideoDecoder.errorCallback = errorCallback;
}
// Functions to disable HW decoding - can be called from applications for platforms
// which have known HW decoding problems.
public static void disableVp8HwCodec() {
Logging.w(TAG, "VP8 decoding is disabled by application.");
hwDecoderDisabledTypes.add(VP8_MIME_TYPE);
}
public static void disableVp9HwCodec() {
Logging.w(TAG, "VP9 decoding is disabled by application.");
hwDecoderDisabledTypes.add(VP9_MIME_TYPE);
}
public static void disableH264HwCodec() {
Logging.w(TAG, "H.264 decoding is disabled by application.");
hwDecoderDisabledTypes.add(H264_MIME_TYPE);
}
// Functions to query if HW decoding is supported.
public static boolean isVp8HwSupported() {
return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) &&
(findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
}
public static boolean isVp9HwSupported() {
return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) &&
(findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
}
public static boolean isH264HwSupported() {
return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) &&
(findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
}
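// Illustrative application-side usage of the static methods above (comments only, not part of
// the original file); |myErrorCallback| is a hypothetical MediaCodecVideoDecoderErrorCallback:
//   MediaCodecVideoDecoder.setErrorCallback(myErrorCallback);
//   MediaCodecVideoDecoder.disableVp8HwCodec(); // opt out on devices with broken VP8 HW decoding
//   boolean canUseH264Hw = MediaCodecVideoDecoder.isH264HwSupported();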
public static void printStackTrace() {
if (runningInstance != null && runningInstance.mediaCodecThread != null) {
StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
if (mediaCodecStackTraces.length > 0) {
Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:");
for (StackTraceElement stackTrace : mediaCodecStackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
// Helper struct for findDecoder() below.
private static class DecoderProperties {
public DecoderProperties(String codecName, int colorFormat) {
this.codecName = codecName;
this.colorFormat = colorFormat;
}
public final String codecName; // OpenMax component name for the codec.
public final int colorFormat; // Color format supported by codec.
}
private static DecoderProperties findDecoder(
String mime, String[] supportedCodecPrefixes) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null; // MediaCodec.setParameters is missing.
}
Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
if (info.isEncoder()) {
continue;
}
String name = null;
for (String mimeType : info.getSupportedTypes()) {
if (mimeType.equals(mime)) {
name = info.getName();
break;
}
}
if (name == null) {
continue; // No HW support in this codec; try the next one.
}
Logging.d(TAG, "Found candidate decoder " + name);
// Check if this is a supported decoder.
boolean supportedCodec = false;
for (String codecPrefix : supportedCodecPrefixes) {
if (name.startsWith(codecPrefix)) {
supportedCodec = true;
break;
}
}
if (!supportedCodec) {
continue;
}
// Check if codec supports either yuv420 or nv12.
CodecCapabilities capabilities =
info.getCapabilitiesForType(mime);
for (int colorFormat : capabilities.colorFormats) {
Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
for (int supportedColorFormat : supportedColorList) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
// Found supported HW decoder.
Logging.d(TAG, "Found target decoder " + name +
". Color: 0x" + Integer.toHexString(codecColorFormat));
return new DecoderProperties(name, codecColorFormat);
}
}
}
}
Logging.d(TAG, "No HW decoder found for mime " + mime);
return null; // No HW decoder.
}
private void checkOnMediaCodecThread() throws IllegalStateException {
if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
throw new IllegalStateException(
"MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
" but is now called on " + Thread.currentThread());
}
}
// Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
private boolean initDecode(
VideoCodecType type, int width, int height,
SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodecThread != null) {
throw new RuntimeException("initDecode: Forgot to release()?");
}
String mime = null;
useSurface = (surfaceTextureHelper != null);
String[] supportedCodecPrefixes = null;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
mime = VP8_MIME_TYPE;
supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
} else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
mime = VP9_MIME_TYPE;
supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
mime = H264_MIME_TYPE;
supportedCodecPrefixes = supportedH264HwCodecPrefixes;
} else {
throw new RuntimeException("initDecode: Non-supported codec " + type);
}
DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
if (properties == null) {
throw new RuntimeException("Cannot find HW decoder for " + type);
}
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
". Color: 0x" + Integer.toHexString(properties.colorFormat) +
". Use Surface: " + useSurface);
runningInstance = this; // Decoder is now running and can be queried for stack traces.
mediaCodecThread = Thread.currentThread();
try {
this.width = width;
this.height = height;
stride = width;
sliceHeight = height;
if (useSurface) {
textureListener = new TextureListener(surfaceTextureHelper);
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
}
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
if (!useSurface) {
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
}
Logging.d(TAG, " Format: " + format);
mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
if (mediaCodec == null) {
Logging.e(TAG, "Can not create media decoder");
return false;
}
mediaCodec.configure(format, surface, null, 0);
mediaCodec.start();
colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers();
inputBuffers = mediaCodec.getInputBuffers();
decodeStartTimeMs.clear();
hasDecodedFirstFrame = false;
dequeuedSurfaceOutputBuffers.clear();
droppedFrames = 0;
Logging.d(TAG, "Input buffers: " + inputBuffers.length +
". Output buffers: " + outputBuffers.length);
return true;
} catch (IllegalStateException e) {
Logging.e(TAG, "initDecode failed", e);
return false;
}
}
// Resets the decoder so it can start decoding frames with new resolution.
// Flushes MediaCodec and clears decoder output buffers.
private void reset(int width, int height) {
if (mediaCodecThread == null || mediaCodec == null) {
throw new RuntimeException("Incorrect reset call for non-initialized decoder.");
}
Logging.d(TAG, "Java reset: " + width + " x " + height);
mediaCodec.flush();
this.width = width;
this.height = height;
decodeStartTimeMs.clear();
dequeuedSurfaceOutputBuffers.clear();
hasDecodedFirstFrame = false;
droppedFrames = 0;
}
private void release() {
Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
checkOnMediaCodecThread();
// Run MediaCodec stop() and release() on a separate thread since sometimes
// MediaCodec.stop() may hang.
final CountDownLatch releaseDone = new CountDownLatch(1);
Runnable runMediaCodecRelease = new Runnable() {
@Override
public void run() {
try {
Logging.d(TAG, "Java releaseDecoder on release thread");
mediaCodec.stop();
mediaCodec.release();
Logging.d(TAG, "Java releaseDecoder on release thread done");
} catch (Exception e) {
Logging.e(TAG, "Media decoder release failed", e);
}
releaseDone.countDown();
}
};
new Thread(runMediaCodecRelease).start();
if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
Logging.e(TAG, "Media decoder release timeout");
codecErrors++;
if (errorCallback != null) {
Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
}
}
mediaCodec = null;
mediaCodecThread = null;
runningInstance = null;
if (useSurface) {
surface.release();
surface = null;
textureListener.release();
}
Logging.d(TAG, "Java releaseDecoder done");
}
// Dequeue an input buffer and return its index, -1 if no input buffer is
// available, or -2 if the codec is no longer operative.
private int dequeueInputBuffer() {
checkOnMediaCodecThread();
try {
return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueIntputBuffer failed", e);
return -2;
}
}
private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs,
long timeStampMs, long ntpTimeStamp) {
checkOnMediaCodecThread();
try {
inputBuffers[inputBufferIndex].position(0);
inputBuffers[inputBufferIndex].limit(size);
decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
ntpTimeStamp));
mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0);
return true;
}
catch (IllegalStateException e) {
Logging.e(TAG, "decode failed", e);
return false;
}
}
private static class TimeStamps {
public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
this.decodeStartTimeMs = decodeStartTimeMs;
this.timeStampMs = timeStampMs;
this.ntpTimeStampMs = ntpTimeStampMs;
}
// Time when this frame was queued for decoding.
private final long decodeStartTimeMs;
// Only used for bookkeeping in Java. Stores C++ inputImage._timeStamp value for input frame.
private final long timeStampMs;
// Only used for bookkeeping in Java. Stores C++ inputImage.ntp_time_ms_ value for input frame.
private final long ntpTimeStampMs;
}
// Helper struct for dequeueOutputBuffer() below.
private static class DecodedOutputBuffer {
public DecodedOutputBuffer(int index, int offset, int size, long presentationTimeStampMs,
long timeStampMs, long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
this.index = index;
this.offset = offset;
this.size = size;
this.presentationTimeStampMs = presentationTimeStampMs;
this.timeStampMs = timeStampMs;
this.ntpTimeStampMs = ntpTimeStampMs;
this.decodeTimeMs = decodeTime;
this.endDecodeTimeMs = endDecodeTime;
}
private final int index;
private final int offset;
private final int size;
// Presentation timestamp returned in dequeueOutputBuffer call.
private final long presentationTimeStampMs;
// C++ inputImage._timeStamp value for output frame.
private final long timeStampMs;
// C++ inputImage.ntp_time_ms_ value for output frame.
private final long ntpTimeStampMs;
// Number of ms it took to decode this frame.
private final long decodeTimeMs;
// System time when this frame decoding finished.
private final long endDecodeTimeMs;
}
// Helper struct for dequeueTextureBuffer() below.
private static class DecodedTextureBuffer {
private final int textureID;
private final float[] transformMatrix;
// Presentation timestamp returned in dequeueOutputBuffer call.
private final long presentationTimeStampMs;
// C++ inputImage._timeStamp value for output frame.
private final long timeStampMs;
// C++ inputImage.ntp_time_ms_ value for output frame.
private final long ntpTimeStampMs;
// Number of ms it took to decode this frame.
private final long decodeTimeMs;
// Interval from when the frame finished decoding until this buffer has been created.
// Since there is only one texture, this interval depends on the time from when
// a frame is decoded and provided to C++ until that frame is returned to the MediaCodec
// so that the texture can be updated with the next decoded frame.
private final long frameDelayMs;
// A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
// that was dropped.
public DecodedTextureBuffer(int textureID, float[] transformMatrix,
long presentationTimeStampMs, long timeStampMs, long ntpTimeStampMs, long decodeTimeMs,
long frameDelay) {
this.textureID = textureID;
this.transformMatrix = transformMatrix;
this.presentationTimeStampMs = presentationTimeStampMs;
this.timeStampMs = timeStampMs;
this.ntpTimeStampMs = ntpTimeStampMs;
this.decodeTimeMs = decodeTimeMs;
this.frameDelayMs = frameDelay;
}
}
// Poll based texture listener.
private static class TextureListener
implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
private final SurfaceTextureHelper surfaceTextureHelper;
// |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
private final Object newFrameLock = new Object();
// |bufferToRender| is non-null when waiting for transition between addBufferToRender() to
// onTextureFrameAvailable().
private DecodedOutputBuffer bufferToRender;
private DecodedTextureBuffer renderedBuffer;
public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
this.surfaceTextureHelper = surfaceTextureHelper;
surfaceTextureHelper.startListening(this);
}
public void addBufferToRender(DecodedOutputBuffer buffer) {
if (bufferToRender != null) {
Logging.e(TAG,
"Unexpected addBufferToRender() called while waiting for a texture.");
throw new IllegalStateException("Waiting for a texture.");
}
bufferToRender = buffer;
}
public boolean isWaitingForTexture() {
synchronized (newFrameLock) {
return bufferToRender != null;
}
}
// Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
synchronized (newFrameLock) {
if (renderedBuffer != null) {
Logging.e(TAG,
"Unexpected onTextureFrameAvailable() called while already holding a texture.");
throw new IllegalStateException("Already holding a texture.");
}
// |timestampNs| is always zero on some Android versions.
renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
bufferToRender.presentationTimeStampMs, bufferToRender.timeStampMs,
bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
bufferToRender = null;
newFrameLock.notifyAll();
}
}
// Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
synchronized (newFrameLock) {
if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
try {
newFrameLock.wait(timeoutMs);
} catch(InterruptedException e) {
// Restore the interrupted status by reinterrupting the thread.
Thread.currentThread().interrupt();
}
}
DecodedTextureBuffer returnedBuffer = renderedBuffer;
renderedBuffer = null;
return returnedBuffer;
}
}
public void release() {
// SurfaceTextureHelper.stopListening() will block until any onTextureFrameAvailable() in
// progress is done. Therefore, the call must be outside any synchronized
// statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
surfaceTextureHelper.stopListening();
synchronized (newFrameLock) {
if (renderedBuffer != null) {
surfaceTextureHelper.returnTextureFrame();
renderedBuffer = null;
}
}
}
}
// Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer.
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
// upon codec error.
private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
checkOnMediaCodecThread();
if (decodeStartTimeMs.isEmpty()) {
return null;
}
// Drain the decoder until receiving a decoded buffer or hitting
// MediaCodec.INFO_TRY_AGAIN_LATER.
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
final int result = mediaCodec.dequeueOutputBuffer(
info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
switch (result) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = mediaCodec.getOutputBuffers();
Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
if (hasDecodedFirstFrame) {
throw new RuntimeException("Unexpected output buffer change event.");
}
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = mediaCodec.getOutputFormat();
Logging.d(TAG, "Decoder format changed: " + format.toString());
int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
height + ". New " + new_width + "*" + new_height);
}
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
if (!supportedColorList.contains(colorFormat)) {
throw new IllegalStateException("Non supported color format: " + colorFormat);
}
}
if (format.containsKey("stride")) {
stride = format.getInteger("stride");
}
if (format.containsKey("slice-height")) {
sliceHeight = format.getInteger("slice-height");
}
Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
return null;
default:
hasDecodedFirstFrame = true;
TimeStamps timeStamps = decodeStartTimeMs.remove();
long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs;
if (decodeTimeMs > MAX_DECODE_TIME_MS) {
Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms"
+ ". Q size: " + decodeStartTimeMs.size()
+ ". Might be caused by resuming H264 decoding after a pause.");
decodeTimeMs = MAX_DECODE_TIME_MS;
}
return new DecodedOutputBuffer(result,
info.offset,
info.size,
TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs),
timeStamps.timeStampMs,
timeStamps.ntpTimeStampMs,
decodeTimeMs,
SystemClock.elapsedRealtime());
}
}
}
// Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
// upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if
// a frame can't be returned.
private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
checkOnMediaCodecThread();
if (!useSurface) {
throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
}
DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
if (outputBuffer != null) {
dequeuedSurfaceOutputBuffers.add(outputBuffer);
}
MaybeRenderDecodedTextureBuffer();
// Check if a texture is ready now by waiting at most |dequeueTimeoutMs|.
DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
if (renderedBuffer != null) {
MaybeRenderDecodedTextureBuffer();
return renderedBuffer;
}
if ((dequeuedSurfaceOutputBuffers.size()
>= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
|| (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
++droppedFrames;
// Drop the oldest frame still in dequeuedSurfaceOutputBuffers.
// The oldest frame is owned by |textureListener| and can't be dropped since
// mediaCodec.releaseOutputBuffer has already been called.
final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
if (dequeueTimeoutMs > 0) {
// TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
// return the one and only texture even if it does not render.
Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
+ droppedFrame.presentationTimeStampMs +
". Total number of dropped frames: " + droppedFrames);
} else {
Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() +
". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs +
". Total number of dropped frames: " + droppedFrames);
}
mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
return new DecodedTextureBuffer(0, null,
droppedFrame.presentationTimeStampMs, droppedFrame.timeStampMs,
droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
}
return null;
}
private void MaybeRenderDecodedTextureBuffer() {
if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) {
return;
}
// Get the first frame in the queue and render to the decoder output surface.
final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
textureListener.addBufferToRender(buffer);
mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
}
// Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
// non-surface decoding.
// Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
// for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
// MediaCodec.CodecException upon codec error.
private void returnDecodedOutputBuffer(int index)
throws IllegalStateException, MediaCodec.CodecException {
checkOnMediaCodecThread();
if (useSurface) {
throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
}
mediaCodec.releaseOutputBuffer(index, false /* render */);
}
}
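// Summary of the two decode paths implemented above: in both cases input frames are fed with
// dequeueInputBuffer() followed by queueInputBuffer().
// - ByteBuffer output (initDecode(..., null)): dequeueOutputBuffer() returns a
//   DecodedOutputBuffer whose index/offset/size locate the YUV data in outputBuffers, and
//   each buffer is handed back with returnDecodedOutputBuffer(index).
// - Surface output (initDecode(..., surfaceTextureHelper)): dequeueTextureBuffer() renders
//   the oldest decoded frame to the decoder Surface and delivers it as a DecodedTextureBuffer;
//   the single texture is recycled through SurfaceTextureHelper.returnTextureFrame() before
//   the next frame can be rendered.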

View File

@ -1,650 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
import android.view.Surface;
import org.webrtc.Logging;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
// This class is an implementation detail of the Java PeerConnection API.
@TargetApi(19)
@SuppressWarnings("deprecation")
public class MediaCodecVideoEncoder {
// This class is constructed, operated, and destroyed by its C++ incarnation,
// so the class and its methods have non-public visibility. The API this
// class exposes aims to mimic the webrtc::VideoEncoder API as closely as
possible to minimize the amount of translation work necessary.
private static final String TAG = "MediaCodecVideoEncoder";
// Tracks webrtc::VideoCodecType.
public enum VideoCodecType {
VIDEO_CODEC_VP8,
VIDEO_CODEC_VP9,
VIDEO_CODEC_H264
}
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
private static final int BITRATE_ADJUSTMENT_FPS = 30;
// Active running encoder instance. Set in initEncode() (called from native code)
// and reset to null in release() call.
private static MediaCodecVideoEncoder runningInstance = null;
private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
private static int codecErrors = 0;
// List of disabled codec types - can be set from application.
private static Set<String> hwEncoderDisabledTypes = new HashSet<String>();
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] outputBuffers;
private EglBase14 eglBase;
private int width;
private int height;
private Surface inputSurface;
private GlRectDrawer drawer;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc";
// Class describing supported media codec properties.
private static class MediaCodecProperties {
public final String codecPrefix;
// Minimum Android SDK required for this codec to be used.
public final int minSdk;
// Set if the encoder implementation does not use frame timestamps to calculate the frame
// bitrate budget and instead relies on the initial fps configuration, assuming that all
// frames arrive at that fixed frame rate. Bitrate adjustment is required in this case.
public final boolean bitrateAdjustmentRequired;
MediaCodecProperties(
String codecPrefix, int minSdk, boolean bitrateAdjustmentRequired) {
this.codecPrefix = codecPrefix;
this.minSdk = minSdk;
this.bitrateAdjustmentRequired = bitrateAdjustmentRequired;
}
}
// List of supported HW VP8 encoders.
private static final MediaCodecProperties qcomVp8HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.KITKAT, false /* bitrateAdjustmentRequired */);
private static final MediaCodecProperties exynosVp8HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.M, false /* bitrateAdjustmentRequired */);
private static final MediaCodecProperties intelVp8HwProperties = new MediaCodecProperties(
"OMX.Intel.", Build.VERSION_CODES.LOLLIPOP, false /* bitrateAdjustmentRequired */);
private static final MediaCodecProperties[] vp8HwList = new MediaCodecProperties[] {
qcomVp8HwProperties, exynosVp8HwProperties, intelVp8HwProperties
};
// List of supported HW VP9 encoders.
private static final MediaCodecProperties qcomVp9HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.M, false /* bitrateAdjustmentRequired */);
private static final MediaCodecProperties exynosVp9HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.M, false /* bitrateAdjustmentRequired */);
private static final MediaCodecProperties[] vp9HwList = new MediaCodecProperties[] {
qcomVp9HwProperties, exynosVp9HwProperties
};
// List of supported HW H.264 encoders.
private static final MediaCodecProperties qcomH264HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.KITKAT, false /* bitrateAdjustmentRequired */);
private static final MediaCodecProperties exynosH264HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.LOLLIPOP, true /* bitrateAdjustmentRequired */);
private static final MediaCodecProperties[] h264HwList = new MediaCodecProperties[] {
qcomH264HwProperties, exynosH264HwProperties
};
// List of devices with poor H.264 encoder quality.
private static final String[] H264_HW_EXCEPTION_MODELS = new String[] {
// The HW H.264 encoder on the devices below has poor bitrate control - the actual
// bitrate deviates a lot from the target value.
"SAMSUNG-SGH-I337",
"Nexus 7",
"Nexus 4"
};
// Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
// in OMX_Video.h
private static final int VIDEO_ControlRateConstant = 2;
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
private static final int
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Allowable color formats supported by codec - in order of preference.
private static final int[] supportedColorList = {
CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
};
private static final int[] supportedSurfaceColorList = {
CodecCapabilities.COLOR_FormatSurface
};
private VideoCodecType type;
private int colorFormat; // Used by native code.
private boolean bitrateAdjustmentRequired;
// SPS and PPS NALs (Config frame) for H.264.
private ByteBuffer configData = null;
// MediaCodec error handler - invoked when a critical error happens that may prevent
// further use of the MediaCodec API. Currently it means that one of the MediaCodec
// instances is hanging and can no longer be used in the next call.
public static interface MediaCodecVideoEncoderErrorCallback {
void onMediaCodecVideoEncoderCriticalError(int codecErrors);
}
public static void setErrorCallback(MediaCodecVideoEncoderErrorCallback errorCallback) {
Logging.d(TAG, "Set error callback");
MediaCodecVideoEncoder.errorCallback = errorCallback;
}
// Functions to disable HW encoding - can be called from applications for platforms
// which have known HW encoding problems.
public static void disableVp8HwCodec() {
Logging.w(TAG, "VP8 encoding is disabled by application.");
hwEncoderDisabledTypes.add(VP8_MIME_TYPE);
}
public static void disableVp9HwCodec() {
Logging.w(TAG, "VP9 encoding is disabled by application.");
hwEncoderDisabledTypes.add(VP9_MIME_TYPE);
}
public static void disableH264HwCodec() {
Logging.w(TAG, "H.264 encoding is disabled by application.");
hwEncoderDisabledTypes.add(H264_MIME_TYPE);
}
// Functions to query if HW encoding is supported.
public static boolean isVp8HwSupported() {
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
(findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedColorList) != null);
}
public static boolean isVp9HwSupported() {
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
(findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
}
public static boolean isH264HwSupported() {
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
(findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
}
public static boolean isVp8HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
(findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedSurfaceColorList) != null);
}
public static boolean isVp9HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
(findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
}
public static boolean isH264HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
(findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
}
// Helper struct for findHwEncoder() below.
private static class EncoderProperties {
public EncoderProperties(String codecName, int colorFormat, boolean bitrateAdjustment) {
this.codecName = codecName;
this.colorFormat = colorFormat;
this.bitrateAdjustment = bitrateAdjustment;
}
public final String codecName; // OpenMax component name for HW codec.
public final int colorFormat; // Color format supported by codec.
public final boolean bitrateAdjustment; // true if bitrate adjustment workaround is required.
}
private static EncoderProperties findHwEncoder(
String mime, MediaCodecProperties[] supportedHwCodecProperties, int[] colorList) {
// MediaCodec.setParameters is missing for JB and below, so bitrate
// cannot be adjusted dynamically.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null;
}
// Check if device is in H.264 exception list.
if (mime.equals(H264_MIME_TYPE)) {
List<String> exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
if (exceptionModels.contains(Build.MODEL)) {
Logging.w(TAG, "Model: " + Build.MODEL + " has black listed H.264 encoder.");
return null;
}
}
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
if (!info.isEncoder()) {
continue;
}
String name = null;
for (String mimeType : info.getSupportedTypes()) {
if (mimeType.equals(mime)) {
name = info.getName();
break;
}
}
if (name == null) {
continue; // No HW support in this codec; try the next one.
}
Logging.v(TAG, "Found candidate encoder " + name);
// Check if this is a supported HW encoder.
boolean supportedCodec = false;
boolean bitrateAdjustmentRequired = false;
for (MediaCodecProperties codecProperties : supportedHwCodecProperties) {
if (name.startsWith(codecProperties.codecPrefix)) {
if (Build.VERSION.SDK_INT < codecProperties.minSdk) {
Logging.w(TAG, "Codec " + name + " is disabled due to SDK version " +
Build.VERSION.SDK_INT);
continue;
}
if (codecProperties.bitrateAdjustmentRequired) {
Logging.w(TAG, "Codec " + name + " does not use frame timestamps.");
bitrateAdjustmentRequired = true;
}
supportedCodec = true;
break;
}
}
if (!supportedCodec) {
continue;
}
// Check if the HW codec supports a known color format.
CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
for (int colorFormat : capabilities.colorFormats) {
Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
for (int supportedColorFormat : colorList) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
// Found supported HW encoder.
Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name +
". Color: 0x" + Integer.toHexString(codecColorFormat));
return new EncoderProperties(name, codecColorFormat, bitrateAdjustmentRequired);
}
}
}
}
return null; // No HW encoder.
}
private void checkOnMediaCodecThread() {
if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
throw new RuntimeException(
"MediaCodecVideoEncoder previously operated on " + mediaCodecThread +
" but is now called on " + Thread.currentThread());
}
}
public static void printStackTrace() {
if (runningInstance != null && runningInstance.mediaCodecThread != null) {
StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
if (mediaCodecStackTraces.length > 0) {
Logging.d(TAG, "MediaCodecVideoEncoder stacks trace:");
for (StackTraceElement stackTrace : mediaCodecStackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
static MediaCodec createByCodecName(String codecName) {
try {
// In the L SDK this call can throw IOException, so catch a generic exception
// in order to work in both cases.
return MediaCodec.createByCodecName(codecName);
} catch (Exception e) {
return null;
}
}
boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
EglBase14.Context sharedContext) {
final boolean useSurface = sharedContext != null;
Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
this.width = width;
this.height = height;
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
EncoderProperties properties = null;
String mime = null;
int keyFrameIntervalSec = 0;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
mime = VP8_MIME_TYPE;
properties = findHwEncoder(
VP8_MIME_TYPE, vp8HwList, useSurface ? supportedSurfaceColorList : supportedColorList);
keyFrameIntervalSec = 100;
} else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
mime = VP9_MIME_TYPE;
properties = findHwEncoder(
VP9_MIME_TYPE, vp9HwList, useSurface ? supportedSurfaceColorList : supportedColorList);
keyFrameIntervalSec = 100;
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
mime = H264_MIME_TYPE;
properties = findHwEncoder(
H264_MIME_TYPE, h264HwList, useSurface ? supportedSurfaceColorList : supportedColorList);
keyFrameIntervalSec = 20;
}
if (properties == null) {
throw new RuntimeException("Can not find HW encoder for " + type);
}
runningInstance = this; // Encoder is now running and can be queried for stack traces.
colorFormat = properties.colorFormat;
bitrateAdjustmentRequired = properties.bitrateAdjustment;
if (bitrateAdjustmentRequired) {
fps = BITRATE_ADJUSTMENT_FPS;
}
Logging.d(TAG, "Color format: " + colorFormat +
". Bitrate adjustment: " + bitrateAdjustmentRequired);
mediaCodecThread = Thread.currentThread();
try {
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
format.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * kbps);
format.setInteger("bitrate-mode", VIDEO_ControlRateConstant);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
Logging.d(TAG, " Format: " + format);
mediaCodec = createByCodecName(properties.codecName);
this.type = type;
if (mediaCodec == null) {
Logging.e(TAG, "Can not create media encoder");
return false;
}
mediaCodec.configure(
format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (useSurface) {
eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
// Create an input surface and keep a reference since we must release the surface when done.
inputSurface = mediaCodec.createInputSurface();
eglBase.createSurface(inputSurface);
drawer = new GlRectDrawer();
}
mediaCodec.start();
outputBuffers = mediaCodec.getOutputBuffers();
Logging.d(TAG, "Output buffers: " + outputBuffers.length);
} catch (IllegalStateException e) {
Logging.e(TAG, "initEncode failed", e);
return false;
}
return true;
}
ByteBuffer[] getInputBuffers() {
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
Logging.d(TAG, "Input buffers: " + inputBuffers.length);
return inputBuffers;
}
boolean encodeBuffer(
boolean isKeyframe, int inputBuffer, int size,
long presentationTimestampUs) {
checkOnMediaCodecThread();
try {
if (isKeyframe) {
// Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
// indicate this in queueInputBuffer() below and guarantee _this_ frame
// be encoded as a key frame, but sadly that flag is ignored. Instead,
// we request a key frame "soon".
Logging.d(TAG, "Sync frame request");
Bundle b = new Bundle();
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
mediaCodec.setParameters(b);
}
mediaCodec.queueInputBuffer(
inputBuffer, 0, size, presentationTimestampUs, 0);
return true;
}
catch (IllegalStateException e) {
Logging.e(TAG, "encodeBuffer failed", e);
return false;
}
}
boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
long presentationTimestampUs) {
checkOnMediaCodecThread();
try {
if (isKeyframe) {
Logging.d(TAG, "Sync frame request");
Bundle b = new Bundle();
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
mediaCodec.setParameters(b);
}
eglBase.makeCurrent();
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
// but it's a workaround for bug webrtc:5147.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
return true;
}
catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e);
return false;
}
}
void release() {
Logging.d(TAG, "Java releaseEncoder");
checkOnMediaCodecThread();
// Run MediaCodec stop() and release() on a separate thread since sometimes
// MediaCodec.stop() may hang.
final CountDownLatch releaseDone = new CountDownLatch(1);
Runnable runMediaCodecRelease = new Runnable() {
@Override
public void run() {
try {
Logging.d(TAG, "Java releaseEncoder on release thread");
mediaCodec.stop();
mediaCodec.release();
Logging.d(TAG, "Java releaseEncoder on release thread done");
} catch (Exception e) {
Logging.e(TAG, "Media encoder release failed", e);
}
releaseDone.countDown();
}
};
new Thread(runMediaCodecRelease).start();
if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
Logging.e(TAG, "Media encoder release timeout");
codecErrors++;
if (errorCallback != null) {
Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
errorCallback.onMediaCodecVideoEncoderCriticalError(codecErrors);
}
}
mediaCodec = null;
mediaCodecThread = null;
if (drawer != null) {
drawer.release();
drawer = null;
}
if (eglBase != null) {
eglBase.release();
eglBase = null;
}
if (inputSurface != null) {
inputSurface.release();
inputSurface = null;
}
runningInstance = null;
Logging.d(TAG, "Java releaseEncoder done");
}
private boolean setRates(int kbps, int frameRate) {
checkOnMediaCodecThread();
int codecBitrate = 1000 * kbps;
if (bitrateAdjustmentRequired && frameRate > 0) {
codecBitrate = BITRATE_ADJUSTMENT_FPS * codecBitrate / frameRate;
Logging.v(TAG, "setRates: " + kbps + " -> " + (codecBitrate / 1000)
+ " kbps. Fps: " + frameRate);
} else {
Logging.v(TAG, "setRates: " + kbps);
}
try {
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, codecBitrate);
mediaCodec.setParameters(params);
return true;
} catch (IllegalStateException e) {
Logging.e(TAG, "setRates failed", e);
return false;
}
}
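// Worked example for setRates() above: with BITRATE_ADJUSTMENT_FPS = 30, a target of
// 500 kbps at an actual frame rate of 15 fps is scaled to 30 * 500000 / 15 = 1000000 bps.
// The codec, which was configured assuming 30 fps, then allocates the same per-frame
// budget that 500 kbps at 15 fps implies.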
// Dequeue an input buffer and return its index, -1 if no input buffer is
// available, or -2 if the codec is no longer operative.
int dequeueInputBuffer() {
checkOnMediaCodecThread();
try {
return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueIntputBuffer failed", e);
return -2;
}
}
// Helper struct for dequeueOutputBuffer() below.
static class OutputBufferInfo {
public OutputBufferInfo(
int index, ByteBuffer buffer,
boolean isKeyFrame, long presentationTimestampUs) {
this.index = index;
this.buffer = buffer;
this.isKeyFrame = isKeyFrame;
this.presentationTimestampUs = presentationTimestampUs;
}
public final int index;
public final ByteBuffer buffer;
public final boolean isKeyFrame;
public final long presentationTimestampUs;
}
// Dequeue and return an output buffer, or null if no output is ready. Return
// a fake OutputBufferInfo with index -1 if the codec is no longer operable.
OutputBufferInfo dequeueOutputBuffer() {
checkOnMediaCodecThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
// Check if this is a config frame and save the configuration data.
if (result >= 0) {
boolean isConfigFrame =
(info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
if (isConfigFrame) {
Logging.d(TAG, "Config frame generated. Offset: " + info.offset +
". Size: " + info.size);
configData = ByteBuffer.allocateDirect(info.size);
outputBuffers[result].position(info.offset);
outputBuffers[result].limit(info.offset + info.size);
configData.put(outputBuffers[result]);
// Release buffer back.
mediaCodec.releaseOutputBuffer(result, false);
// Query next output.
result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
}
}
if (result >= 0) {
// MediaCodec doesn't care about Buffer position/remaining/etc so we can
// mess with them to get a slice and avoid having to pass extra
// (BufferInfo-related) parameters back to C++.
ByteBuffer outputBuffer = outputBuffers[result].duplicate();
outputBuffer.position(info.offset);
outputBuffer.limit(info.offset + info.size);
// Check key frame flag.
boolean isKeyFrame =
(info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (isKeyFrame) {
Logging.d(TAG, "Sync frame generated");
}
if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
Logging.d(TAG, "Appending config frame of size " + configData.capacity() +
" to output buffer with offset " + info.offset + ", size " +
info.size);
// For an H.264 key frame, append the SPS and PPS NALs at the start.
ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(
configData.capacity() + info.size);
configData.rewind();
keyFrameBuffer.put(configData);
keyFrameBuffer.put(outputBuffer);
keyFrameBuffer.position(0);
return new OutputBufferInfo(result, keyFrameBuffer,
isKeyFrame, info.presentationTimeUs);
} else {
return new OutputBufferInfo(result, outputBuffer.slice(),
isKeyFrame, info.presentationTimeUs);
}
} else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mediaCodec.getOutputBuffers();
return dequeueOutputBuffer();
} else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
return dequeueOutputBuffer();
} else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
return null;
}
throw new RuntimeException("dequeueOutputBuffer: " + result);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueOutputBuffer failed", e);
return new OutputBufferInfo(-1, null, false, -1);
}
}
// Release a dequeued output buffer back to the codec for re-use. Return
// false if the codec is no longer operable.
boolean releaseOutputBuffer(int index) {
checkOnMediaCodecThread();
try {
mediaCodec.releaseOutputBuffer(index, false);
return true;
} catch (IllegalStateException e) {
Logging.e(TAG, "releaseOutputBuffer failed", e);
return false;
}
}
}
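// Illustrative application-side sketch of the public static API above: query HW encoder
// availability and install the error callback before any encoder is created. The example
// class name and logging tag are illustrative assumptions, not part of the API.
class MediaCodecVideoEncoderUsageExample {
  static void configureHwEncoding() {
    MediaCodecVideoEncoder.setErrorCallback(
        new MediaCodecVideoEncoder.MediaCodecVideoEncoderErrorCallback() {
          @Override
          public void onMediaCodecVideoEncoderCriticalError(int codecErrors) {
            Logging.e("EncoderUsageExample", "Encoder critical errors: " + codecErrors);
          }
        });
    if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
      Logging.w("EncoderUsageExample", "No HW VP8 encoder; a SW fallback is expected.");
    }
    if (MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) {
      Logging.d("EncoderUsageExample", "H.264 HW encoding from textures is available.");
    }
  }
}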

View File

@ -1,84 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.LinkedList;
import java.util.List;
/**
* Description of media constraints for {@code MediaStream} and
* {@code PeerConnection}.
*/
public class MediaConstraints {
/** Simple String key/value pair. */
public static class KeyValuePair {
private final String key;
private final String value;
public KeyValuePair(String key, String value) {
this.key = key;
this.value = value;
}
public String getKey() {
return key;
}
public String getValue() {
return value;
}
public String toString() {
return key + ": " + value;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
KeyValuePair that = (KeyValuePair)other;
return key.equals(that.key) && value.equals(that.value);
}
@Override
public int hashCode() {
return key.hashCode() + value.hashCode();
}
}
public final List<KeyValuePair> mandatory;
public final List<KeyValuePair> optional;
public MediaConstraints() {
mandatory = new LinkedList<KeyValuePair>();
optional = new LinkedList<KeyValuePair>();
}
private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
StringBuilder builder = new StringBuilder("[");
for (KeyValuePair pair : list) {
if (builder.length() > 1) {
builder.append(", ");
}
builder.append(pair.toString());
}
return builder.append("]").toString();
}
public String toString() {
return "mandatory: " + stringifyKeyValuePairList(mandatory) +
", optional: " + stringifyKeyValuePairList(optional);
}
}
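// Illustrative sketch: building constraints for an SDP offer. The "OfferToReceive*" and
// "DtlsSrtpKeyAgreement" keys are conventional constraint names understood elsewhere in the
// stack; they are not defined in this file and are shown here only as an assumed example.
class MediaConstraintsUsageExample {
  static MediaConstraints sdpOfferConstraints() {
    MediaConstraints constraints = new MediaConstraints();
    // Mandatory constraints must be honored; optional ones are best-effort hints.
    constraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    constraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    constraints.optional.add(
        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    return constraints;
  }
}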

View File

@ -1,38 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ MediaSourceInterface. */
public class MediaSource {
/** Tracks MediaSourceInterface.SourceState */
public enum State {
INITIALIZING, LIVE, ENDED, MUTED
}
final long nativeSource; // Package-protected for PeerConnectionFactory.
public MediaSource(long nativeSource) {
this.nativeSource = nativeSource;
}
public State state() {
return nativeState(nativeSource);
}
public void dispose() {
free(nativeSource);
}
private static native State nativeState(long pointer);
private static native void free(long nativeSource);
}

View File

@ -1,111 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.LinkedList;
/** Java wrapper for a C++ MediaStreamInterface. */
public class MediaStream {
public final LinkedList<AudioTrack> audioTracks;
public final LinkedList<VideoTrack> videoTracks;
public final LinkedList<VideoTrack> preservedVideoTracks;
// Package-protected for PeerConnection.
final long nativeStream;
public MediaStream(long nativeStream) {
audioTracks = new LinkedList<AudioTrack>();
videoTracks = new LinkedList<VideoTrack>();
preservedVideoTracks = new LinkedList<VideoTrack>();
this.nativeStream = nativeStream;
}
public boolean addTrack(AudioTrack track) {
if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) {
audioTracks.add(track);
return true;
}
return false;
}
public boolean addTrack(VideoTrack track) {
if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
videoTracks.add(track);
return true;
}
return false;
}
// Tracks added with addTrack() are automatically released once MediaStream.dispose()
// is called. If a video track needs to be preserved after the MediaStream is destroyed, it
// should be added to the MediaStream with addPreservedTrack() instead.
public boolean addPreservedTrack(VideoTrack track) {
if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
preservedVideoTracks.add(track);
return true;
}
return false;
}
public boolean removeTrack(AudioTrack track) {
audioTracks.remove(track);
return nativeRemoveAudioTrack(nativeStream, track.nativeTrack);
}
public boolean removeTrack(VideoTrack track) {
videoTracks.remove(track);
preservedVideoTracks.remove(track);
return nativeRemoveVideoTrack(nativeStream, track.nativeTrack);
}
public void dispose() {
// Remove and release previously added audio and video tracks.
while (!audioTracks.isEmpty()) {
AudioTrack track = audioTracks.getFirst();
removeTrack(track);
track.dispose();
}
while (!videoTracks.isEmpty()) {
VideoTrack track = videoTracks.getFirst();
removeTrack(track);
track.dispose();
}
// Remove, but do not release preserved video tracks.
while (!preservedVideoTracks.isEmpty()) {
removeTrack(preservedVideoTracks.getFirst());
}
free(nativeStream);
}
public String label() {
return nativeLabel(nativeStream);
}
public String toString() {
return "[" + label() + ":A=" + audioTracks.size() +
":V=" + videoTracks.size() + "]";
}
private static native boolean nativeAddAudioTrack(
long nativeStream, long nativeAudioTrack);
private static native boolean nativeAddVideoTrack(
long nativeStream, long nativeVideoTrack);
private static native boolean nativeRemoveAudioTrack(
long nativeStream, long nativeAudioTrack);
private static native boolean nativeRemoveVideoTrack(
long nativeStream, long nativeVideoTrack);
private static native String nativeLabel(long nativeStream);
private static native void free(long nativeStream);
}
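// Illustrative sketch of the ownership rules documented above: a track added with addTrack()
// is released by dispose(), while a video track added with addPreservedTrack() is only
// detached and must be disposed by the caller. The tracks are assumed to have been created
// elsewhere (e.g. by a PeerConnectionFactory).
class MediaStreamUsageExample {
  static void attachAndTearDown(
      MediaStream stream, AudioTrack audioTrack, VideoTrack preservedVideoTrack) {
    stream.addTrack(audioTrack); // Owned by the stream from now on.
    stream.addPreservedTrack(preservedVideoTrack); // Still owned by the caller.
    // ... use the stream ...
    stream.dispose(); // Releases audioTrack and detaches preservedVideoTrack.
    preservedVideoTrack.dispose(); // The caller releases the preserved track itself.
  }
}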

View File

@ -1,60 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ MediaStreamTrackInterface. */
public class MediaStreamTrack {
/** Tracks MediaStreamTrackInterface.TrackState */
public enum State { LIVE, ENDED }
final long nativeTrack;
public MediaStreamTrack(long nativeTrack) {
this.nativeTrack = nativeTrack;
}
public String id() {
return nativeId(nativeTrack);
}
public String kind() {
return nativeKind(nativeTrack);
}
public boolean enabled() {
return nativeEnabled(nativeTrack);
}
public boolean setEnabled(boolean enable) {
return nativeSetEnabled(nativeTrack, enable);
}
public State state() {
return nativeState(nativeTrack);
}
public void dispose() {
free(nativeTrack);
}
private static native String nativeId(long nativeTrack);
private static native String nativeKind(long nativeTrack);
private static native boolean nativeEnabled(long nativeTrack);
private static native boolean nativeSetEnabled(
long nativeTrack, boolean enabled);
private static native State nativeState(long nativeTrack);
private static native void free(long nativeTrack);
}

View File

@ -1,77 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.HashMap;
import java.util.Map;
// Java-side of androidmetrics_jni.cc.
//
// Rtc histograms can be queried through the API, getAndReset().
// The returned map holds the name of a histogram and its samples.
//
// Example of |map| with one histogram:
// |name|: "WebRTC.Video.InputFramesPerSecond"
// |min|: 1
// |max|: 100
// |bucketCount|: 50
// |samples|: [30]:1
//
// Most histograms are not updated frequently (e.g. most video metrics are an
// average over the call and recorded when a stream is removed).
// The metrics can for example be retrieved when a peer connection is closed.
public class Metrics {
static {
System.loadLibrary("jingle_peerconnection_so");
}
public final Map<String, HistogramInfo> map =
new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
/**
* Class holding histogram information.
*/
public static class HistogramInfo {
public final int min;
public final int max;
public final int bucketCount;
public final Map<Integer, Integer> samples =
new HashMap<Integer, Integer>(); // <value, # of events>
public HistogramInfo(int min, int max, int bucketCount) {
this.min = min;
this.max = max;
this.bucketCount = bucketCount;
}
public void addSample(int value, int numEvents) {
samples.put(value, numEvents);
}
}
private void add(String name, HistogramInfo info) {
map.put(name, info);
}
// Enables gathering of metrics (which can be fetched with getAndReset()).
// Must be called before PeerConnectionFactory is created.
public static void enable() {
nativeEnable();
}
// Gets and clears native histograms.
public static Metrics getAndReset() {
return nativeGetAndReset();
}
private static native void nativeEnable();
private static native Metrics nativeGetAndReset();
}
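// Illustrative sketch of the API above: enable metrics gathering before the
// PeerConnectionFactory exists, then fetch and log the histograms later (for example after
// a call has ended). The logging tag is an assumption.
class MetricsUsageExample {
  static void enableMetrics() {
    Metrics.enable(); // Must run before PeerConnectionFactory is created.
  }
  static void logAndResetMetrics() {
    Metrics metrics = Metrics.getAndReset();
    for (Map.Entry<String, Metrics.HistogramInfo> entry : metrics.map.entrySet()) {
      Metrics.HistogramInfo info = entry.getValue();
      Logging.d("MetricsUsageExample",
          entry.getKey() + " range=[" + info.min + "," + info.max + "] samples=" + info.samples);
    }
  }
}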

View File

@ -1,252 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
import static org.webrtc.NetworkMonitorAutoDetect.NetworkInformation;
import org.webrtc.Logging;
import android.content.Context;
import java.util.ArrayList;
import java.util.List;
/**
* Borrowed from Chromium's src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java
*
* Triggers updates to the underlying network state from OS networking events.
*
* WARNING: This class is not thread-safe.
*/
public class NetworkMonitor {
/**
* Alerted when the connection type of the network changes.
* The alert is fired on the UI thread.
*/
public interface NetworkObserver {
public void onConnectionTypeChanged(ConnectionType connectionType);
}
private static final String TAG = "NetworkMonitor";
private static NetworkMonitor instance;
private final Context applicationContext;
// Native observers of the connection type changes.
private final ArrayList<Long> nativeNetworkObservers;
// Java observers of the connection type changes.
private final ArrayList<NetworkObserver> networkObservers;
// Object that detects the connection type changes.
private NetworkMonitorAutoDetect autoDetector;
private ConnectionType currentConnectionType = ConnectionType.CONNECTION_UNKNOWN;
private NetworkMonitor(Context context) {
assertIsTrue(context != null);
applicationContext =
context.getApplicationContext() == null ? context : context.getApplicationContext();
nativeNetworkObservers = new ArrayList<Long>();
networkObservers = new ArrayList<NetworkObserver>();
}
/**
* Initializes the singleton once.
* Called from the native code.
*/
public static NetworkMonitor init(Context context) {
if (!isInitialized()) {
instance = new NetworkMonitor(context);
}
return instance;
}
public static boolean isInitialized() {
return instance != null;
}
/**
* Returns the singleton instance.
*/
public static NetworkMonitor getInstance() {
return instance;
}
/**
* Enables auto detection of the current network state based on notifications from the system.
* Note that passing true here requires that the embedding app has the platform ACCESS_NETWORK_STATE
* permission.
*
* @param shouldAutoDetect true if the NetworkMonitor should listen for system changes in
* network connectivity.
*/
public static void setAutoDetectConnectivityState(boolean shouldAutoDetect) {
getInstance().setAutoDetectConnectivityStateInternal(shouldAutoDetect);
}
private static void assertIsTrue(boolean condition) {
if (!condition) {
throw new AssertionError("Expected to be true");
}
}
// Called by the native code.
private void startMonitoring(long nativeObserver) {
Logging.d(TAG, "Start monitoring from native observer " + nativeObserver);
nativeNetworkObservers.add(nativeObserver);
setAutoDetectConnectivityStateInternal(true);
}
// Called by the native code.
private void stopMonitoring(long nativeObserver) {
Logging.d(TAG, "Stop monitoring from native observer " + nativeObserver);
setAutoDetectConnectivityStateInternal(false);
nativeNetworkObservers.remove(nativeObserver);
}
private ConnectionType getCurrentConnectionType() {
return currentConnectionType;
}
private int getCurrentDefaultNetId() {
return autoDetector == null ? INVALID_NET_ID : autoDetector.getDefaultNetId();
}
private void destroyAutoDetector() {
if (autoDetector != null) {
autoDetector.destroy();
autoDetector = null;
}
}
private void setAutoDetectConnectivityStateInternal(boolean shouldAutoDetect) {
if (!shouldAutoDetect) {
destroyAutoDetector();
return;
}
if (autoDetector == null) {
autoDetector = new NetworkMonitorAutoDetect(
new NetworkMonitorAutoDetect.Observer() {
@Override
public void onConnectionTypeChanged(ConnectionType newConnectionType) {
updateCurrentConnectionType(newConnectionType);
}
@Override
public void onNetworkConnect(NetworkInformation networkInfo) {
notifyObserversOfNetworkConnect(networkInfo);
}
@Override
public void onNetworkDisconnect(int networkHandle) {
notifyObserversOfNetworkDisconnect(networkHandle);
}
},
applicationContext);
final NetworkMonitorAutoDetect.NetworkState networkState =
autoDetector.getCurrentNetworkState();
updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
updateActiveNetworkList();
}
}
private void updateCurrentConnectionType(ConnectionType newConnectionType) {
currentConnectionType = newConnectionType;
notifyObserversOfConnectionTypeChange(newConnectionType);
}
/**
* Alerts all observers of a connection change.
*/
private void notifyObserversOfConnectionTypeChange(ConnectionType newConnectionType) {
for (long nativeObserver : nativeNetworkObservers) {
nativeNotifyConnectionTypeChanged(nativeObserver);
}
for (NetworkObserver observer : networkObservers) {
observer.onConnectionTypeChanged(newConnectionType);
}
}
private void notifyObserversOfNetworkConnect(NetworkInformation networkInfo) {
for (long nativeObserver : nativeNetworkObservers) {
nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
}
}
private void notifyObserversOfNetworkDisconnect(int networkHandle) {
for (long nativeObserver : nativeNetworkObservers) {
nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
}
}
private void updateActiveNetworkList() {
List<NetworkInformation> networkInfoList = autoDetector.getActiveNetworkList();
if (networkInfoList == null || networkInfoList.size() == 0) {
return;
}
NetworkInformation[] networkInfos = new NetworkInformation[networkInfoList.size()];
networkInfos = networkInfoList.toArray(networkInfos);
for (long nativeObserver : nativeNetworkObservers) {
nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
}
}
/**
* Adds an observer for any connection type changes.
*/
public static void addNetworkObserver(NetworkObserver observer) {
getInstance().addNetworkObserverInternal(observer);
}
private void addNetworkObserverInternal(NetworkObserver observer) {
networkObservers.add(observer);
}
/**
* Removes an observer for any connection type changes.
*/
public static void removeNetworkObserver(NetworkObserver observer) {
getInstance().removeNetworkObserverInternal(observer);
}
private void removeNetworkObserverInternal(NetworkObserver observer) {
networkObservers.remove(observer);
}
/**
* Checks if there currently is connectivity.
*/
public static boolean isOnline() {
ConnectionType connectionType = getInstance().getCurrentConnectionType();
return connectionType != ConnectionType.CONNECTION_NONE;
}
private native void nativeNotifyConnectionTypeChanged(long nativePtr);
private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
private native void nativeNotifyOfNetworkDisconnect(long nativePtr, int networkHandle);
private native void nativeNotifyOfActiveNetworkList(long nativePtr,
NetworkInformation[] networkInfos);
// For testing only.
static void resetInstanceForTests(Context context) {
instance = new NetworkMonitor(context);
}
// For testing only.
public static NetworkMonitorAutoDetect getAutoDetectorForTest() {
return getInstance().autoDetector;
}
}
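// Illustrative sketch: reacting to connection type changes from application code. It assumes
// the monitor has already been initialized (init() is normally driven by the native layer)
// and that the app holds the ACCESS_NETWORK_STATE permission. The logging tag is an
// assumption.
class NetworkMonitorUsageExample {
  static void watchConnectivity() {
    NetworkMonitor.addNetworkObserver(new NetworkMonitor.NetworkObserver() {
      @Override
      public void onConnectionTypeChanged(
          NetworkMonitorAutoDetect.ConnectionType connectionType) {
        Logging.d("NetworkMonitorUsageExample", "Connection type changed: " + connectionType);
      }
    });
    NetworkMonitor.setAutoDetectConnectivityState(true);
    Logging.d("NetworkMonitorUsageExample", "Online: " + NetworkMonitor.isOnline());
  }
}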

View File

@ -1,622 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static android.net.NetworkCapabilities.NET_CAPABILITY_INTERNET;
import static android.net.NetworkCapabilities.TRANSPORT_CELLULAR;
import org.webrtc.Logging;
import android.Manifest.permission;
import android.annotation.SuppressLint;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.net.ConnectivityManager;
import android.net.ConnectivityManager.NetworkCallback;
import android.net.LinkAddress;
import android.net.LinkProperties;
import android.net.Network;
import android.net.NetworkCapabilities;
import android.net.NetworkInfo;
import android.net.NetworkRequest;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.telephony.TelephonyManager;
import java.util.ArrayList;
import java.util.List;
/**
* Borrowed from Chromium's
* src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
*
* Used by the NetworkMonitor to listen to platform changes in connectivity.
* Note that use of this class requires that the app have the platform
* ACCESS_NETWORK_STATE permission.
*/
public class NetworkMonitorAutoDetect extends BroadcastReceiver {
public static enum ConnectionType {
CONNECTION_UNKNOWN,
CONNECTION_ETHERNET,
CONNECTION_WIFI,
CONNECTION_4G,
CONNECTION_3G,
CONNECTION_2G,
CONNECTION_UNKNOWN_CELLULAR,
CONNECTION_BLUETOOTH,
CONNECTION_NONE
}
public static class IPAddress {
public final byte[] address;
public IPAddress (byte[] address) {
this.address = address;
}
}
/** Java version of NetworkMonitor.NetworkInformation */
public static class NetworkInformation{
public final String name;
public final ConnectionType type;
public final int handle;
public final IPAddress[] ipAddresses;
public NetworkInformation(String name, ConnectionType type, int handle,
IPAddress[] addresses) {
this.name = name;
this.type = type;
this.handle = handle;
this.ipAddresses = addresses;
}
};
static class NetworkState {
private final boolean connected;
// Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
// further divided into 2G, 3G, or 4G from the subtype.
private final int type;
// Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs.
// Will be useful to find the maximum bandwidth.
private final int subtype;
public NetworkState(boolean connected, int type, int subtype) {
this.connected = connected;
this.type = type;
this.subtype = subtype;
}
public boolean isConnected() {
return connected;
}
public int getNetworkType() {
return type;
}
public int getNetworkSubType() {
return subtype;
}
}
/**
* The methods in this class get called when the network changes if the callback
* is registered with a proper network request. It is only available in Android Lollipop
* and above.
*/
@SuppressLint("NewApi")
private class SimpleNetworkCallback extends NetworkCallback {
@Override
public void onAvailable(Network network) {
Logging.d(TAG, "Network becomes available: " + network.toString());
onNetworkChanged(network);
}
@Override
public void onCapabilitiesChanged(
Network network, NetworkCapabilities networkCapabilities) {
// A capabilities change may indicate the ConnectionType has changed,
// so forward the new NetworkInformation along to the observer.
Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
onNetworkChanged(network);
}
@Override
public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) {
// A link property change may indicate the IP address changes.
// so forward the new NetworkInformation to the observer.
Logging.d(TAG, "link properties changed: " + linkProperties.toString());
onNetworkChanged(network);
}
@Override
public void onLosing(Network network, int maxMsToLive) {
// Indicates that the network is about to be lost within maxMsToLive milliseconds.
// We may use this signal later.
Logging.d(TAG,
"Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
}
@Override
public void onLost(Network network) {
Logging.d(TAG, "Network " + network.toString() + " is disconnected");
observer.onNetworkDisconnect(networkToNetId(network));
}
private void onNetworkChanged(Network network) {
NetworkInformation networkInformation = connectivityManagerDelegate.networkToInfo(network);
if (networkInformation != null) {
observer.onNetworkConnect(networkInformation);
}
}
}
/** Queries the ConnectivityManager for information about the current connection. */
static class ConnectivityManagerDelegate {
/**
* Note: On some rare Android systems, connectivityManager can be null. We handle that
* gracefully below.
*/
private final ConnectivityManager connectivityManager;
ConnectivityManagerDelegate(Context context) {
connectivityManager =
(ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
}
// For testing.
ConnectivityManagerDelegate() {
// All the methods below should be overridden.
connectivityManager = null;
}
/**
* Returns connection type and status information about the current
* default network.
*/
NetworkState getNetworkState() {
if (connectivityManager == null) {
return new NetworkState(false, -1, -1);
}
return getNetworkState(connectivityManager.getActiveNetworkInfo());
}
/**
* Returns connection type and status information about |network|.
* Only callable on Lollipop and newer releases.
*/
@SuppressLint("NewApi")
NetworkState getNetworkState(Network network) {
if (connectivityManager == null) {
return new NetworkState(false, -1, -1);
}
return getNetworkState(connectivityManager.getNetworkInfo(network));
}
/**
* Returns connection type and status information gleaned from networkInfo.
*/
NetworkState getNetworkState(NetworkInfo networkInfo) {
if (networkInfo == null || !networkInfo.isConnected()) {
return new NetworkState(false, -1, -1);
}
return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype());
}
/**
* Returns all connected networks.
* Only callable on Lollipop and newer releases.
*/
@SuppressLint("NewApi")
Network[] getAllNetworks() {
if (connectivityManager == null) {
return new Network[0];
}
return connectivityManager.getAllNetworks();
}
List<NetworkInformation> getActiveNetworkList() {
if (!supportNetworkCallback()) {
return null;
}
ArrayList<NetworkInformation> netInfoList = new ArrayList<NetworkInformation>();
for (Network network : getAllNetworks()) {
NetworkInformation info = networkToInfo(network);
if (info != null) {
netInfoList.add(info);
}
}
return netInfoList;
}
/**
* Returns the NetID of the current default network. Returns
* INVALID_NET_ID if no default network is currently connected.
* Only callable on Lollipop and newer releases.
*/
@SuppressLint("NewApi")
int getDefaultNetId() {
if (!supportNetworkCallback()) {
return INVALID_NET_ID;
}
// Android Lollipop had no API to get the default network; only an
// API to return the NetworkInfo of the default network. To
// determine the default network, find the connected network whose
// type matches that of the default NetworkInfo.
final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo();
if (defaultNetworkInfo == null) {
return INVALID_NET_ID;
}
final Network[] networks = getAllNetworks();
int defaultNetId = INVALID_NET_ID;
for (Network network : networks) {
if (!hasInternetCapability(network)) {
continue;
}
final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) {
// There should not be multiple connected networks of the
// same type. At least as of Android Marshmallow this is
// not supported. If this becomes supported this assertion
// may trigger. At that point we could consider using
// ConnectivityManager.getDefaultNetwork() though this
// may give confusing results with VPNs and is only
// available with Android Marshmallow.
assert defaultNetId == INVALID_NET_ID;
defaultNetId = networkToNetId(network);
}
}
return defaultNetId;
}
@SuppressLint("NewApi")
private NetworkInformation networkToInfo(Network network) {
LinkProperties linkProperties = connectivityManager.getLinkProperties(network);
// getLinkProperties will return null if the network is unknown.
if (linkProperties == null) {
Logging.w(TAG, "Detected unknown network: " + network.toString());
return null;
}
if (linkProperties.getInterfaceName() == null) {
Logging.w(TAG, "Null interface name for network " + network.toString());
return null;
}
NetworkState networkState = getNetworkState(network);
ConnectionType connectionType = getConnectionType(networkState);
if (connectionType == ConnectionType.CONNECTION_NONE) {
// This may not be an error. The OS may signal a network event with connection type
// NONE when the network disconnects.
Logging.d(TAG, "Network " + network.toString() + " is disconnected");
return null;
}
// Some Android devices may return a CONNECTION_UNKNOWN_CELLULAR or CONNECTION_UNKNOWN type,
// which nevertheless appears to be usable. Just log such cases here.
if (connectionType == ConnectionType.CONNECTION_UNKNOWN
|| connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
+ " because it has type " + networkState.getNetworkType()
+ " and subtype " + networkState.getNetworkSubType());
}
NetworkInformation networkInformation = new NetworkInformation(
linkProperties.getInterfaceName(),
connectionType,
networkToNetId(network),
getIPAddresses(linkProperties));
return networkInformation;
}
/**
* Returns true if {@code network} can provide Internet access. Can be used to
* ignore specialized networks (e.g. IMS, FOTA).
*/
@SuppressLint("NewApi")
boolean hasInternetCapability(Network network) {
if (connectivityManager == null) {
return false;
}
final NetworkCapabilities capabilities =
connectivityManager.getNetworkCapabilities(network);
return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
}
/** Only callable on Lollipop and newer releases. */
@SuppressLint("NewApi")
public void registerNetworkCallback(NetworkCallback networkCallback) {
connectivityManager.registerNetworkCallback(
new NetworkRequest.Builder().addCapability(NET_CAPABILITY_INTERNET).build(),
networkCallback);
}
/** Only callable on Lollipop and newer releases. */
@SuppressLint("NewApi")
public void requestMobileNetwork(NetworkCallback networkCallback) {
NetworkRequest.Builder builder = new NetworkRequest.Builder();
builder.addCapability(NET_CAPABILITY_INTERNET).addTransportType(TRANSPORT_CELLULAR);
connectivityManager.requestNetwork(builder.build(), networkCallback);
}
@SuppressLint("NewApi")
IPAddress[] getIPAddresses(LinkProperties linkProperties) {
IPAddress[] ipAddresses = new IPAddress[linkProperties.getLinkAddresses().size()];
int i = 0;
for (LinkAddress linkAddress : linkProperties.getLinkAddresses()) {
ipAddresses[i] = new IPAddress(linkAddress.getAddress().getAddress());
++i;
}
return ipAddresses;
}
@SuppressLint("NewApi")
public void releaseCallback(NetworkCallback networkCallback) {
if (supportNetworkCallback()) {
Logging.d(TAG, "Unregister network callback");
connectivityManager.unregisterNetworkCallback(networkCallback);
}
}
public boolean supportNetworkCallback() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && connectivityManager != null;
}
}
/** Queries the WifiManager for SSID of the current Wifi connection. */
static class WifiManagerDelegate {
private final Context context;
WifiManagerDelegate(Context context) {
this.context = context;
}
// For testing.
WifiManagerDelegate() {
// All the methods below should be overridden.
context = null;
}
String getWifiSSID() {
final Intent intent = context.registerReceiver(null,
new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
if (intent != null) {
final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
if (wifiInfo != null) {
final String ssid = wifiInfo.getSSID();
if (ssid != null) {
return ssid;
}
}
}
return "";
}
}
static final int INVALID_NET_ID = -1;
private static final String TAG = "NetworkMonitorAutoDetect";
// Observer for the connection type change.
private final Observer observer;
private final IntentFilter intentFilter;
private final Context context;
// Used to request a mobile network. It does nothing except keep the callback
// around so the request can be released later.
private final NetworkCallback mobileNetworkCallback;
// Used to receive updates on all networks.
private final NetworkCallback allNetworkCallback;
// connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing.
private ConnectivityManagerDelegate connectivityManagerDelegate;
private WifiManagerDelegate wifiManagerDelegate;
private boolean isRegistered;
private ConnectionType connectionType;
private String wifiSSID;
/**
* Observer interface by which observers are notified of network changes.
*/
public static interface Observer {
/**
* Called when default network changes.
*/
public void onConnectionTypeChanged(ConnectionType newConnectionType);
public void onNetworkConnect(NetworkInformation networkInfo);
public void onNetworkDisconnect(int networkHandle);
}
/**
* Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread.
*/
@SuppressLint("NewApi")
public NetworkMonitorAutoDetect(Observer observer, Context context) {
this.observer = observer;
this.context = context;
connectivityManagerDelegate = new ConnectivityManagerDelegate(context);
wifiManagerDelegate = new WifiManagerDelegate(context);
final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
connectionType = getConnectionType(networkState);
wifiSSID = getWifiSSID(networkState);
intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
registerReceiver();
if (connectivityManagerDelegate.supportNetworkCallback()) {
// On Android 6.0.0, the WRITE_SETTINGS permission is necessary for
// requestNetwork, so it will fail. This was fixed in Android 6.0.1.
NetworkCallback tempNetworkCallback = new NetworkCallback();
try {
connectivityManagerDelegate.requestMobileNetwork(tempNetworkCallback);
} catch (java.lang.SecurityException e) {
Logging.w(TAG, "Unable to obtain permission to request a cellular network.");
tempNetworkCallback = null;
}
mobileNetworkCallback = tempNetworkCallback;
allNetworkCallback = new SimpleNetworkCallback();
connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback);
} else {
mobileNetworkCallback = null;
allNetworkCallback = null;
}
}
/**
* Allows overriding the ConnectivityManagerDelegate for tests.
*/
void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) {
connectivityManagerDelegate = delegate;
}
/**
* Allows overriding the WifiManagerDelegate for tests.
*/
void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) {
wifiManagerDelegate = delegate;
}
/**
* Returns whether the object has registered to receive network connectivity intents.
* Visible for testing.
*/
boolean isReceiverRegisteredForTesting() {
return isRegistered;
}
List<NetworkInformation> getActiveNetworkList() {
return connectivityManagerDelegate.getActiveNetworkList();
}
public void destroy() {
if (allNetworkCallback != null) {
connectivityManagerDelegate.releaseCallback(allNetworkCallback);
}
if (mobileNetworkCallback != null) {
connectivityManagerDelegate.releaseCallback(mobileNetworkCallback);
}
unregisterReceiver();
}
/**
* Registers a BroadcastReceiver in the given context.
*/
private void registerReceiver() {
if (isRegistered) return;
isRegistered = true;
context.registerReceiver(this, intentFilter);
}
/**
* Unregisters the BroadcastReceiver in the given context.
*/
private void unregisterReceiver() {
if (!isRegistered) return;
isRegistered = false;
context.unregisterReceiver(this);
}
public NetworkState getCurrentNetworkState() {
return connectivityManagerDelegate.getNetworkState();
}
/**
* Returns the NetID of the device's current default connected network used for
* communication.
* Only implemented on Lollipop and newer releases; returns INVALID_NET_ID
* when not implemented.
*/
public int getDefaultNetId() {
return connectivityManagerDelegate.getDefaultNetId();
}
public static ConnectionType getConnectionType(NetworkState networkState) {
if (!networkState.isConnected()) {
return ConnectionType.CONNECTION_NONE;
}
switch (networkState.getNetworkType()) {
case ConnectivityManager.TYPE_ETHERNET:
return ConnectionType.CONNECTION_ETHERNET;
case ConnectivityManager.TYPE_WIFI:
return ConnectionType.CONNECTION_WIFI;
case ConnectivityManager.TYPE_WIMAX:
return ConnectionType.CONNECTION_4G;
case ConnectivityManager.TYPE_BLUETOOTH:
return ConnectionType.CONNECTION_BLUETOOTH;
case ConnectivityManager.TYPE_MOBILE:
// Use information from TelephonyManager to classify the connection.
switch (networkState.getNetworkSubType()) {
case TelephonyManager.NETWORK_TYPE_GPRS:
case TelephonyManager.NETWORK_TYPE_EDGE:
case TelephonyManager.NETWORK_TYPE_CDMA:
case TelephonyManager.NETWORK_TYPE_1xRTT:
case TelephonyManager.NETWORK_TYPE_IDEN:
return ConnectionType.CONNECTION_2G;
case TelephonyManager.NETWORK_TYPE_UMTS:
case TelephonyManager.NETWORK_TYPE_EVDO_0:
case TelephonyManager.NETWORK_TYPE_EVDO_A:
case TelephonyManager.NETWORK_TYPE_HSDPA:
case TelephonyManager.NETWORK_TYPE_HSUPA:
case TelephonyManager.NETWORK_TYPE_HSPA:
case TelephonyManager.NETWORK_TYPE_EVDO_B:
case TelephonyManager.NETWORK_TYPE_EHRPD:
case TelephonyManager.NETWORK_TYPE_HSPAP:
return ConnectionType.CONNECTION_3G;
case TelephonyManager.NETWORK_TYPE_LTE:
return ConnectionType.CONNECTION_4G;
default:
return ConnectionType.CONNECTION_UNKNOWN_CELLULAR;
}
default:
return ConnectionType.CONNECTION_UNKNOWN;
}
}
private String getWifiSSID(NetworkState networkState) {
if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI) return "";
return wifiManagerDelegate.getWifiSSID();
}
// BroadcastReceiver
@Override
public void onReceive(Context context, Intent intent) {
final NetworkState networkState = getCurrentNetworkState();
if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) {
connectionTypeChanged(networkState);
}
}
private void connectionTypeChanged(NetworkState networkState) {
ConnectionType newConnectionType = getConnectionType(networkState);
String newWifiSSID = getWifiSSID(networkState);
if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return;
connectionType = newConnectionType;
wifiSSID = newWifiSSID;
Logging.d(TAG, "Network connectivity changed, type is: " + connectionType);
observer.onConnectionTypeChanged(newConnectionType);
}
/**
* Extracts NetID of network. Only available on Lollipop and newer releases.
*/
@SuppressLint("NewApi")
private static int networkToNetId(Network network) {
// NOTE(pauljensen): This depends on Android framework implementation details.
// Fortunately this functionality is unlikely to ever change.
// TODO(honghaiz): When we update to Android M SDK, use Network.getNetworkHandle().
return Integer.parseInt(network.toString());
}
}
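A minimal usage sketch of the detector above (not part of the original sources): the anonymous Observer and the appContext variable are assumptions, the constructor should run on the UI thread, and the app must hold the ACCESS_NETWORK_STATE permission.

NetworkMonitorAutoDetect detector = new NetworkMonitorAutoDetect(
    new NetworkMonitorAutoDetect.Observer() {
      @Override
      public void onConnectionTypeChanged(
          NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
        Logging.d("NetworkExample", "Default network changed: " + newConnectionType);
      }
      @Override
      public void onNetworkConnect(NetworkMonitorAutoDetect.NetworkInformation networkInfo) {
        Logging.d("NetworkExample", "Network connected: " + networkInfo.name);
      }
      @Override
      public void onNetworkDisconnect(int networkHandle) {
        Logging.d("NetworkExample", "Network lost, handle: " + networkHandle);
      }
    },
    appContext);  // appContext is a hypothetical android.content.Context.
// Later, when connectivity updates are no longer needed:
detector.destroy();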

View File

@ -1 +0,0 @@
magjed@webrtc.org

View File

@ -1,306 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
/**
* Java-land version of the PeerConnection APIs; wraps the C++ API
* http://www.webrtc.org/reference/native-apis, which in turn is inspired by the
* JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and
* http://www.w3.org/TR/mediacapture-streams/
*/
public class PeerConnection {
static {
System.loadLibrary("jingle_peerconnection_so");
}
/** Tracks PeerConnectionInterface::IceGatheringState */
public enum IceGatheringState { NEW, GATHERING, COMPLETE };
/** Tracks PeerConnectionInterface::IceConnectionState */
public enum IceConnectionState {
NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED
};
/** Tracks PeerConnectionInterface::SignalingState */
public enum SignalingState {
STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER,
HAVE_REMOTE_PRANSWER, CLOSED
};
/** Java version of PeerConnectionObserver. */
public static interface Observer {
/** Triggered when the SignalingState changes. */
public void onSignalingChange(SignalingState newState);
/** Triggered when the IceConnectionState changes. */
public void onIceConnectionChange(IceConnectionState newState);
/** Triggered when the ICE connection receiving status changes. */
public void onIceConnectionReceivingChange(boolean receiving);
/** Triggered when the IceGatheringState changes. */
public void onIceGatheringChange(IceGatheringState newState);
/** Triggered when a new ICE candidate has been found. */
public void onIceCandidate(IceCandidate candidate);
/** Triggered when some ICE candidates have been removed. */
public void onIceCandidatesRemoved(IceCandidate[] candidates);
/** Triggered when media is received on a new stream from a remote peer. */
public void onAddStream(MediaStream stream);
/** Triggered when a remote peer closes a stream. */
public void onRemoveStream(MediaStream stream);
/** Triggered when a remote peer opens a DataChannel. */
public void onDataChannel(DataChannel dataChannel);
/** Triggered when renegotiation is necessary. */
public void onRenegotiationNeeded();
}
/** Java version of PeerConnectionInterface.IceServer. */
public static class IceServer {
public final String uri;
public final String username;
public final String password;
/** Convenience constructor for STUN servers. */
public IceServer(String uri) {
this(uri, "", "");
}
public IceServer(String uri, String username, String password) {
this.uri = uri;
this.username = username;
this.password = password;
}
public String toString() {
return uri + "[" + username + ":" + password + "]";
}
}
/** Java version of PeerConnectionInterface.IceTransportsType */
public enum IceTransportsType {
NONE, RELAY, NOHOST, ALL
};
/** Java version of PeerConnectionInterface.BundlePolicy */
public enum BundlePolicy {
BALANCED, MAXBUNDLE, MAXCOMPAT
};
/** Java version of PeerConnectionInterface.RtcpMuxPolicy */
public enum RtcpMuxPolicy {
NEGOTIATE, REQUIRE
};
/** Java version of PeerConnectionInterface.TcpCandidatePolicy */
public enum TcpCandidatePolicy {
ENABLED, DISABLED
};
/** Java version of PeerConnectionInterface.CandidateNetworkPolicy */
public enum CandidateNetworkPolicy {
ALL, LOW_COST
};
/** Java version of rtc::KeyType */
public enum KeyType {
RSA, ECDSA
}
/** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
public enum ContinualGatheringPolicy {
GATHER_ONCE, GATHER_CONTINUALLY
}
/** Java version of PeerConnectionInterface.RTCConfiguration */
public static class RTCConfiguration {
public IceTransportsType iceTransportsType;
public List<IceServer> iceServers;
public BundlePolicy bundlePolicy;
public RtcpMuxPolicy rtcpMuxPolicy;
public TcpCandidatePolicy tcpCandidatePolicy;
public CandidateNetworkPolicy candidateNetworkPolicy;
public int audioJitterBufferMaxPackets;
public boolean audioJitterBufferFastAccelerate;
public int iceConnectionReceivingTimeout;
public int iceBackupCandidatePairPingInterval;
public KeyType keyType;
public ContinualGatheringPolicy continualGatheringPolicy;
public int iceCandidatePoolSize;
public RTCConfiguration(List<IceServer> iceServers) {
iceTransportsType = IceTransportsType.ALL;
bundlePolicy = BundlePolicy.BALANCED;
rtcpMuxPolicy = RtcpMuxPolicy.NEGOTIATE;
tcpCandidatePolicy = TcpCandidatePolicy.ENABLED;
candidateNetworkPolicy = CandidateNetworkPolicy.ALL;
this.iceServers = iceServers;
audioJitterBufferMaxPackets = 50;
audioJitterBufferFastAccelerate = false;
iceConnectionReceivingTimeout = -1;
iceBackupCandidatePairPingInterval = -1;
keyType = KeyType.ECDSA;
continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
iceCandidatePoolSize = 0;
}
};
private final List<MediaStream> localStreams;
private final long nativePeerConnection;
private final long nativeObserver;
private List<RtpSender> senders;
private List<RtpReceiver> receivers;
PeerConnection(long nativePeerConnection, long nativeObserver) {
this.nativePeerConnection = nativePeerConnection;
this.nativeObserver = nativeObserver;
localStreams = new LinkedList<MediaStream>();
senders = new LinkedList<RtpSender>();
receivers = new LinkedList<RtpReceiver>();
}
// JsepInterface.
public native SessionDescription getLocalDescription();
public native SessionDescription getRemoteDescription();
public native DataChannel createDataChannel(
String label, DataChannel.Init init);
public native void createOffer(
SdpObserver observer, MediaConstraints constraints);
public native void createAnswer(
SdpObserver observer, MediaConstraints constraints);
public native void setLocalDescription(
SdpObserver observer, SessionDescription sdp);
public native void setRemoteDescription(
SdpObserver observer, SessionDescription sdp);
public native boolean setConfiguration(RTCConfiguration config);
public boolean addIceCandidate(IceCandidate candidate) {
return nativeAddIceCandidate(
candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
}
public boolean removeIceCandidates(final IceCandidate[] candidates) {
return nativeRemoveIceCandidates(candidates);
}
public boolean addStream(MediaStream stream) {
boolean ret = nativeAddLocalStream(stream.nativeStream);
if (!ret) {
return false;
}
localStreams.add(stream);
return true;
}
public void removeStream(MediaStream stream) {
nativeRemoveLocalStream(stream.nativeStream);
localStreams.remove(stream);
}
public RtpSender createSender(String kind, String stream_id) {
RtpSender new_sender = nativeCreateSender(kind, stream_id);
if (new_sender != null) {
senders.add(new_sender);
}
return new_sender;
}
// Note that calling getSenders will dispose of the senders previously
// returned (and same goes for getReceivers).
public List<RtpSender> getSenders() {
for (RtpSender sender : senders) {
sender.dispose();
}
senders = nativeGetSenders();
return Collections.unmodifiableList(senders);
}
public List<RtpReceiver> getReceivers() {
for (RtpReceiver receiver : receivers) {
receiver.dispose();
}
receivers = nativeGetReceivers();
return Collections.unmodifiableList(receivers);
}
public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
return nativeGetStats(observer, (track == null) ? 0 : track.nativeTrack);
}
// TODO(fischman): add support for DTMF-related methods once that API
// stabilizes.
public native SignalingState signalingState();
public native IceConnectionState iceConnectionState();
public native IceGatheringState iceGatheringState();
public native void close();
public void dispose() {
close();
for (MediaStream stream : localStreams) {
nativeRemoveLocalStream(stream.nativeStream);
stream.dispose();
}
localStreams.clear();
for (RtpSender sender : senders) {
sender.dispose();
}
senders.clear();
for (RtpReceiver receiver : receivers) {
receiver.dispose();
}
receivers.clear();
freePeerConnection(nativePeerConnection);
freeObserver(nativeObserver);
}
private static native void freePeerConnection(long nativePeerConnection);
private static native void freeObserver(long nativeObserver);
private native boolean nativeAddIceCandidate(
String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
private native boolean nativeRemoveIceCandidates(final IceCandidate[] candidates);
private native boolean nativeAddLocalStream(long nativeStream);
private native void nativeRemoveLocalStream(long nativeStream);
private native boolean nativeGetStats(
StatsObserver observer, long nativeTrack);
private native RtpSender nativeCreateSender(String kind, String stream_id);
private native List<RtpSender> nativeGetSenders();
private native List<RtpReceiver> nativeGetReceivers();
}
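As a hedged sketch of how the configuration types above fit together (the server URIs are placeholders, not part of the original sources), a caller typically builds an RTCConfiguration from a list of IceServers and overrides individual policies before handing it to PeerConnectionFactory.createPeerConnection():

List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
iceServers.add(new PeerConnection.IceServer("stun:stun.example.org"));
iceServers.add(new PeerConnection.IceServer("turn:turn.example.org", "user", "secret"));
PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(iceServers);
config.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
config.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;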

View File

@ -1,293 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.List;
/**
* Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to
* the PeerConnection API for clients.
*/
public class PeerConnectionFactory {
static {
System.loadLibrary("jingle_peerconnection_so");
}
private static final String TAG = "PeerConnectionFactory";
private final long nativeFactory;
private static Thread networkThread;
private static Thread workerThread;
private static Thread signalingThread;
private EglBase localEglbase;
private EglBase remoteEglbase;
public static class Options {
// Keep in sync with webrtc/base/network.h!
static final int ADAPTER_TYPE_UNKNOWN = 0;
static final int ADAPTER_TYPE_ETHERNET = 1 << 0;
static final int ADAPTER_TYPE_WIFI = 1 << 1;
static final int ADAPTER_TYPE_CELLULAR = 1 << 2;
static final int ADAPTER_TYPE_VPN = 1 << 3;
static final int ADAPTER_TYPE_LOOPBACK = 1 << 4;
public int networkIgnoreMask;
public boolean disableEncryption;
public boolean disableNetworkMonitor;
}
// |context| is an android.content.Context object, but we keep it untyped here
// to allow building on non-Android platforms.
// Callers may specify either |initializeAudio| or |initializeVideo| as false
// to skip initializing the respective engine (and avoid the need for the
// respective permissions).
// |renderEGLContext| can be provided to support HW video decoding to
// texture and will be used to create a shared EGL context on video
// decoding thread.
public static native boolean initializeAndroidGlobals(
Object context, boolean initializeAudio, boolean initializeVideo,
boolean videoHwAcceleration);
// Field trial initialization. Must be called before PeerConnectionFactory
// is created.
public static native void initializeFieldTrials(String fieldTrialsInitString);
// Internal tracing initialization. Must be called before PeerConnectionFactory is created to
// prevent racing with tracing code.
public static native void initializeInternalTracer();
// Internal tracing shutdown, called to prevent resource leaks. Must be called after
// PeerConnectionFactory is gone to prevent races with code performing tracing.
public static native void shutdownInternalTracer();
// Start/stop capturing of internal tracing.
public static native boolean startInternalTracingCapture(String tracing_filename);
public static native void stopInternalTracingCapture();
@Deprecated
public PeerConnectionFactory() {
this(null);
}
public PeerConnectionFactory(Options options) {
nativeFactory = nativeCreatePeerConnectionFactory(options);
if (nativeFactory == 0) {
throw new RuntimeException("Failed to initialize PeerConnectionFactory!");
}
}
public PeerConnection createPeerConnection(
PeerConnection.RTCConfiguration rtcConfig,
MediaConstraints constraints,
PeerConnection.Observer observer) {
long nativeObserver = nativeCreateObserver(observer);
if (nativeObserver == 0) {
return null;
}
long nativePeerConnection = nativeCreatePeerConnection(
nativeFactory, rtcConfig, constraints, nativeObserver);
if (nativePeerConnection == 0) {
return null;
}
return new PeerConnection(nativePeerConnection, nativeObserver);
}
public PeerConnection createPeerConnection(
List<PeerConnection.IceServer> iceServers,
MediaConstraints constraints,
PeerConnection.Observer observer) {
PeerConnection.RTCConfiguration rtcConfig =
new PeerConnection.RTCConfiguration(iceServers);
return createPeerConnection(rtcConfig, constraints, observer);
}
public MediaStream createLocalMediaStream(String label) {
return new MediaStream(
nativeCreateLocalMediaStream(nativeFactory, label));
}
// The VideoSource takes ownership of |capturer|, so capturer.release() should not be called
// manually after this.
public VideoSource createVideoSource(
VideoCapturer capturer, MediaConstraints constraints) {
final EglBase.Context eglContext =
localEglbase == null ? null : localEglbase.getEglBaseContext();
return new VideoSource(nativeCreateVideoSource(nativeFactory,
eglContext, capturer, constraints));
}
public VideoTrack createVideoTrack(String id, VideoSource source) {
return new VideoTrack(nativeCreateVideoTrack(
nativeFactory, id, source.nativeSource));
}
public AudioSource createAudioSource(MediaConstraints constraints) {
return new AudioSource(nativeCreateAudioSource(nativeFactory, constraints));
}
public AudioTrack createAudioTrack(String id, AudioSource source) {
return new AudioTrack(nativeCreateAudioTrack(
nativeFactory, id, source.nativeSource));
}
// Starts recording an AEC dump. Ownership of the file is transferred to the
// native code. If an AEC dump is already in progress, it will be stopped and
// a new one will start using the provided file.
public boolean startAecDump(int file_descriptor, int filesize_limit_bytes) {
return nativeStartAecDump(nativeFactory, file_descriptor, filesize_limit_bytes);
}
// Stops recording an AEC dump. If no AEC dump is currently being recorded,
// this call will have no effect.
public void stopAecDump() {
nativeStopAecDump(nativeFactory);
}
// Starts recording an RTC event log. Ownership of the file is transferred to
// the native code. If an RTC event log is already being recorded, it will be
// stopped and a new one will start using the provided file.
public boolean startRtcEventLog(int file_descriptor) {
return startRtcEventLog(file_descriptor, -1);
}
// Same as above, but allows setting an upper limit to the size of the
// generated logfile.
public boolean startRtcEventLog(int file_descriptor,
int filesize_limit_bytes) {
return nativeStartRtcEventLog(nativeFactory,
file_descriptor,
filesize_limit_bytes);
}
// Stops recording an RTC event log. If no RTC event log is currently being
// recorded, this call will have no effect.
public void stopRtcEventLog() {
nativeStopRtcEventLog(nativeFactory);
}
@Deprecated
public void setOptions(Options options) {
nativeSetOptions(nativeFactory, options);
}
/** Set the EGL context used by HW Video encoding and decoding.
*
* @param localEglContext Must be the same as used by VideoCapturerAndroid and any local video
* renderer.
* @param remoteEglContext Must be the same as used by any remote video renderer.
*/
public void setVideoHwAccelerationOptions(EglBase.Context localEglContext,
EglBase.Context remoteEglContext) {
if (localEglbase != null) {
Logging.w(TAG, "Egl context already set.");
localEglbase.release();
}
if (remoteEglbase != null) {
Logging.w(TAG, "Egl context already set.");
remoteEglbase.release();
}
localEglbase = EglBase.create(localEglContext);
remoteEglbase = EglBase.create(remoteEglContext);
nativeSetVideoHwAccelerationOptions(nativeFactory, localEglbase.getEglBaseContext(),
remoteEglbase.getEglBaseContext());
}
public void dispose() {
nativeFreeFactory(nativeFactory);
networkThread = null;
workerThread = null;
signalingThread = null;
if (localEglbase != null)
localEglbase.release();
if (remoteEglbase != null)
remoteEglbase.release();
}
public void threadsCallbacks() {
nativeThreadsCallbacks(nativeFactory);
}
private static void printStackTrace(Thread thread, String threadName) {
if (thread != null) {
StackTraceElement[] stackTraces = thread.getStackTrace();
if (stackTraces.length > 0) {
Logging.d(TAG, threadName + " stack trace:");
for (StackTraceElement stackTrace : stackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
public static void printStackTraces() {
printStackTrace(networkThread, "Network thread");
printStackTrace(workerThread, "Worker thread");
printStackTrace(signalingThread, "Signaling thread");
}
private static void onNetworkThreadReady() {
networkThread = Thread.currentThread();
Logging.d(TAG, "onNetworkThreadReady");
}
private static void onWorkerThreadReady() {
workerThread = Thread.currentThread();
Logging.d(TAG, "onWorkerThreadReady");
}
private static void onSignalingThreadReady() {
signalingThread = Thread.currentThread();
Logging.d(TAG, "onSignalingThreadReady");
}
private static native long nativeCreatePeerConnectionFactory(Options options);
private static native long nativeCreateObserver(
PeerConnection.Observer observer);
private static native long nativeCreatePeerConnection(
long nativeFactory, PeerConnection.RTCConfiguration rtcConfig,
MediaConstraints constraints, long nativeObserver);
private static native long nativeCreateLocalMediaStream(
long nativeFactory, String label);
private static native long nativeCreateVideoSource(
long nativeFactory, EglBase.Context eglContext, VideoCapturer videoCapturer,
MediaConstraints constraints);
private static native long nativeCreateVideoTrack(
long nativeFactory, String id, long nativeVideoSource);
private static native long nativeCreateAudioSource(
long nativeFactory, MediaConstraints constraints);
private static native long nativeCreateAudioTrack(
long nativeFactory, String id, long nativeSource);
private static native boolean nativeStartAecDump(
long nativeFactory, int file_descriptor, int filesize_limit_bytes);
private static native void nativeStopAecDump(long nativeFactory);
private static native boolean nativeStartRtcEventLog(long nativeFactory,
int file_descriptor,
int filesize_limit_bytes);
private static native void nativeStopRtcEventLog(long nativeFactory);
@Deprecated
public native void nativeSetOptions(long nativeFactory, Options options);
private static native void nativeSetVideoHwAccelerationOptions(
long nativeFactory, Object localEGLContext, Object remoteEGLContext);
private static native void nativeThreadsCallbacks(long nativeFactory);
private static native void nativeFreeFactory(long nativeFactory);
}
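A hedged startup sketch, assuming an Android Context named appContext, a PeerConnection.Observer implementation pcObserver, and an RTCConfiguration config built as sketched for PeerConnection above (all three names are assumptions, not part of the original sources):

PeerConnectionFactory.initializeAndroidGlobals(
    appContext, true /* initializeAudio */, true /* initializeVideo */,
    true /* videoHwAcceleration */);
PeerConnectionFactory factory = new PeerConnectionFactory(new PeerConnectionFactory.Options());
PeerConnection peerConnection =
    factory.createPeerConnection(config, new MediaConstraints(), pcObserver);
// When tearing down:
peerConnection.dispose();
factory.dispose();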

View File

@ -1,246 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Point;
import android.opengl.GLES20;
import android.opengl.Matrix;
import java.nio.ByteBuffer;
/**
* Static helper functions for renderer implementations.
*/
public class RendererCommon {
/** Interface for reporting rendering events. */
public static interface RendererEvents {
/**
* Callback fired once first frame is rendered.
*/
public void onFirstFrameRendered();
/**
* Callback fired when rendered frame resolution or rotation has changed.
*/
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
}
/** Interface for rendering frames on an EGLSurface. */
public static interface GlDrawer {
/**
* Functions for drawing frames with different sources. The rendering surface target is
* implied by the current EGL context of the calling thread and requires no explicit argument.
* The coordinates specify the viewport location on the surface target.
*/
void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
/**
* Release all GL resources. This needs to be done manually, otherwise resources may leak.
*/
void release();
}
/**
* Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This
* class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
*/
public static class YuvUploader {
// Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
// TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
// that handles stride and compare performance with intermediate copy.
private ByteBuffer copyBuffer;
/**
* Upload |planes| into |outputYuvTextures|, taking stride into consideration.
* |outputYuvTextures| must have been generated in advance.
*/
public void uploadYuvData(
int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
final int[] planeWidths = new int[] {width, width / 2, width / 2};
final int[] planeHeights = new int[] {height, height / 2, height / 2};
// Make a first pass to see if we need a temporary copy buffer.
int copyCapacityNeeded = 0;
for (int i = 0; i < 3; ++i) {
if (strides[i] > planeWidths[i]) {
copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
}
}
// Allocate copy buffer if necessary.
if (copyCapacityNeeded > 0
&& (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
}
// Upload each plane.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
// GLES only accepts packed data, i.e. stride == planeWidth.
final ByteBuffer packedByteBuffer;
if (strides[i] == planeWidths[i]) {
// Input is packed already.
packedByteBuffer = planes[i];
} else {
VideoRenderer.nativeCopyPlane(
planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
packedByteBuffer = copyBuffer;
}
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
}
}
}
// Types of video scaling:
// SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
// maintaining the aspect ratio (black borders may be displayed).
// SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
// maintaining the aspect ratio. Some portion of the video frame may be
// clipped.
// SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
// possible of the view while maintaining aspect ratio, under the constraint that at least
// |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
// This limits excessive cropping when adjusting display size.
private static final float BALANCED_VISIBLE_FRACTION = 0.5625f;
public static final float[] identityMatrix() {
return new float[] {
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1};
}
// Matrix with transform y' = 1 - y.
public static final float[] verticalFlipMatrix() {
return new float[] {
1, 0, 0, 0,
0, -1, 0, 0,
0, 0, 1, 0,
0, 1, 0, 1};
}
// Matrix with transform x' = 1 - x.
public static final float[] horizontalFlipMatrix() {
return new float[] {
-1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
1, 0, 0, 1};
}
/**
* Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
* clockwise when rendered.
*/
public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
final float[] rotationMatrix = new float[16];
Matrix.setRotateM(rotationMatrix, 0, rotationDegree, 0, 0, 1);
adjustOrigin(rotationMatrix);
return multiplyMatrices(textureMatrix, rotationMatrix);
}
/**
* Returns new matrix with the result of a * b.
*/
public static float[] multiplyMatrices(float[] a, float[] b) {
final float[] resultMatrix = new float[16];
Matrix.multiplyMM(resultMatrix, 0, a, 0, b, 0);
return resultMatrix;
}
/**
* Returns layout transformation matrix that applies an optional mirror effect and compensates
* for video vs display aspect ratio.
*/
public static float[] getLayoutMatrix(
boolean mirror, float videoAspectRatio, float displayAspectRatio) {
float scaleX = 1;
float scaleY = 1;
// Scale X or Y dimension so that video and display size have same aspect ratio.
if (displayAspectRatio > videoAspectRatio) {
scaleY = videoAspectRatio / displayAspectRatio;
} else {
scaleX = displayAspectRatio / videoAspectRatio;
}
// Apply optional horizontal flip.
if (mirror) {
scaleX *= -1;
}
final float matrix[] = new float[16];
Matrix.setIdentityM(matrix, 0);
Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
adjustOrigin(matrix);
return matrix;
}
/**
* Calculate display size based on scaling type, video aspect ratio, and maximum display size.
*/
public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
int maxDisplayWidth, int maxDisplayHeight) {
return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
maxDisplayWidth, maxDisplayHeight);
}
/**
* Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
* that are in the range 0 to 1.
*/
private static void adjustOrigin(float[] matrix) {
// Note that OpenGL is using column-major order.
// Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
// Post translate with 0.5 to move coordinates to range [0, 1].
matrix[12] += 0.5f;
matrix[13] += 0.5f;
}
/**
* Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
* that must remain visible.
*/
private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
switch (scalingType) {
case SCALE_ASPECT_FIT:
return 1.0f;
case SCALE_ASPECT_FILL:
return 0.0f;
case SCALE_ASPECT_BALANCED:
return BALANCED_VISIBLE_FRACTION;
default:
throw new IllegalArgumentException();
}
}
/**
* Calculate display size based on minimum fraction of the video that must remain visible,
* video aspect ratio, and maximum display size.
*/
private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
int maxDisplayWidth, int maxDisplayHeight) {
// If there is no constraint on the amount of cropping, fill the allowed display area.
if (minVisibleFraction == 0 || videoAspectRatio == 0) {
return new Point(maxDisplayWidth, maxDisplayHeight);
}
// Each dimension is constrained on max display size and how much we are allowed to crop.
final int width = Math.min(maxDisplayWidth,
Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
final int height = Math.min(maxDisplayHeight,
Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
return new Point(width, height);
}
}
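A small worked example (not from the original sources) of the scaling helpers above: for a 4:3 video in a 1280x720 view, SCALE_ASPECT_FIT yields a 960x720 display size, while SCALE_ASPECT_BALANCED is allowed to crop down to the 0.5625 visible fraction and therefore fills the full 1280x720.

float videoAspectRatio = 4.0f / 3.0f;
Point fit = RendererCommon.getDisplaySize(
    RendererCommon.ScalingType.SCALE_ASPECT_FIT, videoAspectRatio, 1280, 720);       // 960x720
Point balanced = RendererCommon.getDisplaySize(
    RendererCommon.ScalingType.SCALE_ASPECT_BALANCED, videoAspectRatio, 1280, 720);  // 1280x720
float[] layoutMatrix = RendererCommon.getLayoutMatrix(
    false /* mirror */, videoAspectRatio, (float) fit.x / fit.y);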

View File

@ -1,41 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.List;
import java.util.LinkedList;
/**
* The parameters for an {@code RtpSender}, as defined in
* http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface.
*/
public class RtpParameters {
public static class Encoding {
public boolean active = true;
// A null value means "no maximum bitrate".
public Integer maxBitrateBps;
}
public static class Codec {
int payloadType;
String mimeType;
int clockRate;
int channels = 1;
}
public final LinkedList<Encoding> encodings;
public final LinkedList<Codec> codecs;
public RtpParameters() {
encodings = new LinkedList<Encoding>();
codecs = new LinkedList<Codec>();
}
}
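A hedged sketch of the typical round trip for these parameters, assuming sender is an RtpSender obtained from the PeerConnection (the 500 kbps figure is purely illustrative):

RtpParameters parameters = sender.getParameters();
if (!parameters.encodings.isEmpty()) {
  parameters.encodings.get(0).maxBitrateBps = 500 * 1000;  // null would mean "no maximum".
  sender.setParameters(parameters);
}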

View File

@ -1,59 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ RtpReceiverInterface. */
public class RtpReceiver {
final long nativeRtpReceiver;
private MediaStreamTrack cachedTrack;
public RtpReceiver(long nativeRtpReceiver) {
this.nativeRtpReceiver = nativeRtpReceiver;
long track = nativeGetTrack(nativeRtpReceiver);
// We can assume that an RtpReceiver always has an associated track.
cachedTrack = new MediaStreamTrack(track);
}
public MediaStreamTrack track() {
return cachedTrack;
}
public boolean setParameters(RtpParameters parameters) {
return nativeSetParameters(nativeRtpReceiver, parameters);
}
public RtpParameters getParameters() {
return nativeGetParameters(nativeRtpReceiver);
}
public String id() {
return nativeId(nativeRtpReceiver);
}
public void dispose() {
cachedTrack.dispose();
free(nativeRtpReceiver);
}
// This should increment the reference count of the track.
// Will be released in dispose().
private static native long nativeGetTrack(long nativeRtpReceiver);
private static native boolean nativeSetParameters(long nativeRtpReceiver,
RtpParameters parameters);
private static native RtpParameters nativeGetParameters(long nativeRtpReceiver);
private static native String nativeId(long nativeRtpReceiver);
private static native void free(long nativeRtpReceiver);
};
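A brief sketch (assuming a peerConnection with remote media) of enumerating receivers; note the caveat on PeerConnection.getReceivers() above, which disposes the receivers returned by a previous call.

for (RtpReceiver receiver : peerConnection.getReceivers()) {
  MediaStreamTrack track = receiver.track();
  Logging.d("RtpExample", "Receiver " + receiver.id() + " has a track: " + (track != null));
}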

View File

@ -1,84 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ RtpSenderInterface. */
public class RtpSender {
final long nativeRtpSender;
private MediaStreamTrack cachedTrack;
private boolean ownsTrack = true;
public RtpSender(long nativeRtpSender) {
this.nativeRtpSender = nativeRtpSender;
long track = nativeGetTrack(nativeRtpSender);
// It may be possible for an RtpSender to be created without a track.
cachedTrack = (track == 0) ? null : new MediaStreamTrack(track);
}
// If |takeOwnership| is true, the RtpSender takes ownership of the track
// from the caller, and will auto-dispose of it when no longer needed.
// |takeOwnership| should only be used if the caller owns the track; it is
// not appropriate when the track is owned by, for example, another RtpSender
// or a MediaStream.
public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
if (!nativeSetTrack(nativeRtpSender,
(track == null) ? 0 : track.nativeTrack)) {
return false;
}
if (cachedTrack != null && ownsTrack) {
cachedTrack.dispose();
}
cachedTrack = track;
ownsTrack = takeOwnership;
return true;
}
public MediaStreamTrack track() {
return cachedTrack;
}
public boolean setParameters(RtpParameters parameters) {
return nativeSetParameters(nativeRtpSender, parameters);
}
public RtpParameters getParameters() {
return nativeGetParameters(nativeRtpSender);
}
public String id() {
return nativeId(nativeRtpSender);
}
public void dispose() {
if (cachedTrack != null && ownsTrack) {
cachedTrack.dispose();
}
free(nativeRtpSender);
}
private static native boolean nativeSetTrack(long nativeRtpSender,
long nativeTrack);
// This should increment the reference count of the track.
// Will be released in dispose() or setTrack().
private static native long nativeGetTrack(long nativeRtpSender);
private static native boolean nativeSetParameters(long nativeRtpSender,
RtpParameters parameters);
private static native RtpParameters nativeGetParameters(long nativeRtpSender);
private static native String nativeId(long nativeRtpSender);
private static native void free(long nativeRtpSender);
}
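A hedged sketch of the ownership contract described above: localVideoTrack is a caller-owned track (hypothetical here), and passing takeOwnership == true hands its lifetime to the sender, which will dispose of it when the track is replaced or the sender is disposed. The stream id string is illustrative.

RtpSender sender = peerConnection.createSender("video", "exampleStreamId");
if (sender != null && sender.setTrack(localVideoTrack, true /* takeOwnership */)) {
  Logging.d("RtpExample", "Sending on sender " + sender.id());
}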

View File

@ -1,26 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Interface for observing SDP-related events. */
public interface SdpObserver {
/** Called on success of Create{Offer,Answer}(). */
public void onCreateSuccess(SessionDescription sdp);
/** Called on success of Set{Local,Remote}Description(). */
public void onSetSuccess();
/** Called on error of Create{Offer,Answer}(). */
public void onCreateFailure(String error);
/** Called on error of Set{Local,Remote}Description(). */
public void onSetFailure(String error);
}
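A minimal, hypothetical implementation that only logs outcomes; a real client would pass the created SessionDescription to PeerConnection.setLocalDescription() and forward it to the remote side over its own signaling channel.

SdpObserver loggingObserver = new SdpObserver() {
  @Override
  public void onCreateSuccess(SessionDescription sdp) {
    Logging.d("SdpExample", "Created " + sdp.type.canonicalForm());
  }
  @Override
  public void onSetSuccess() {
    Logging.d("SdpExample", "Description applied.");
  }
  @Override
  public void onCreateFailure(String error) {
    Logging.w("SdpExample", "Create failed: " + error);
  }
  @Override
  public void onSetFailure(String error) {
    Logging.w("SdpExample", "Set failed: " + error);
  }
};
peerConnection.createOffer(loggingObserver, new MediaConstraints());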

View File

@ -1,40 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Description of an RFC 4566 Session.
* SDPs are passed as serialized Strings in Java-land and are materialized
* to SessionDescriptionInterface as appropriate in the JNI layer.
*/
public class SessionDescription {
/** Java-land enum version of SessionDescriptionInterface's type() string. */
public static enum Type {
OFFER, PRANSWER, ANSWER;
public String canonicalForm() {
return name().toLowerCase();
}
public static Type fromCanonicalForm(String canonical) {
return Type.valueOf(Type.class, canonical.toUpperCase());
}
}
public final Type type;
public final String description;
public SessionDescription(Type type, String description) {
this.type = type;
this.description = description;
}
}
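A short sketch of the type round trip (sdpString stands in for a serialized SDP blob): canonicalForm() produces the lowercase string used on the wire, and fromCanonicalForm() recovers the enum on the receiving side.

SessionDescription offer = new SessionDescription(SessionDescription.Type.OFFER, sdpString);
String wireType = offer.type.canonicalForm();                         // "offer"
SessionDescription.Type parsed = SessionDescription.Type.fromCanonicalForm(wireType);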

View File

@ -1,17 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Interface for observing Stats reports (see webrtc::StatsObservers). */
public interface StatsObserver {
/** Called when the reports are ready. */
public void onComplete(StatsReport[] reports);
}

View File

@ -1,55 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java version of webrtc::StatsReport. */
public class StatsReport {
/** Java version of webrtc::StatsReport::Value. */
public static class Value {
public final String name;
public final String value;
public Value(String name, String value) {
this.name = name;
this.value = value;
}
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("[").append(name).append(": ").append(value).append("]");
return builder.toString();
}
}
public final String id;
public final String type;
// Time since 1970-01-01T00:00:00Z in milliseconds.
public final double timestamp;
public final Value[] values;
public StatsReport(String id, String type, double timestamp, Value[] values) {
this.id = id;
this.type = type;
this.timestamp = timestamp;
this.values = values;
}
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("id: ").append(id).append(", type: ").append(type)
.append(", timestamp: ").append(timestamp).append(", values: ");
for (int i = 0; i < values.length; ++i) {
builder.append(values[i].toString()).append(", ");
}
return builder.toString();
}
}
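A hedged sketch that dumps every report via the toString() above; passing a null track to PeerConnection.getStats() requests statistics for the connection as a whole.

peerConnection.getStats(new StatsObserver() {
  @Override
  public void onComplete(StatsReport[] reports) {
    for (StatsReport report : reports) {
      Logging.d("StatsExample", report.toString());
    }
  }
}, null);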

View File

@ -1,499 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/**
* Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
* of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
* the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
* called in order to receive a new frame. Call stopListening() to stop receiving new frames. Call
* dispose() to release all resources once the texture frame is returned.
* Note that there is a C++ counterpart of this class that can optionally be used. It is used for
* wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
* when the webrtc::VideoFrame is no longer used.
*/
class SurfaceTextureHelper {
private static final String TAG = "SurfaceTextureHelper";
/**
* Callback interface for being notified that a new texture frame is available. The calls will be
* made on a dedicated thread with a bound EGLContext. The thread will be the same throughout the
* lifetime of the SurfaceTextureHelper instance, but different from the thread calling the
* SurfaceTextureHelper constructor. The callee is not allowed to make another EGLContext current
* on the calling thread.
*/
public interface OnTextureFrameAvailableListener {
abstract void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs);
}
/**
* Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
* thread and handler are created for handling the SurfaceTexture. May return null if EGL fails to
* initialize a pixel buffer surface and make it current.
*/
public static SurfaceTextureHelper create(
final String threadName, final EglBase.Context sharedContext) {
final HandlerThread thread = new HandlerThread(threadName);
thread.start();
final Handler handler = new Handler(thread.getLooper());
// The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
// Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
// is constructed on the |handler| thread.
return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
@Override
public SurfaceTextureHelper call() {
try {
return new SurfaceTextureHelper(sharedContext, handler);
} catch (RuntimeException e) {
Logging.e(TAG, threadName + " create failure", e);
return null;
}
}
});
}
// State for YUV conversion, instantiated on demand.
private static class YuvConverter {
private final EglBase eglBase;
private final GlShader shader;
private boolean released = false;
// Vertex coordinates in Normalized Device Coordinates, i.e.
// (-1, -1) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer DEVICE_RECTANGLE =
GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer TEXTURE_RECTANGLE =
GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
});
private static final String VERTEX_SHADER =
"varying vec2 interp_tc;\n"
+ "attribute vec4 in_pos;\n"
+ "attribute vec4 in_tc;\n"
+ "\n"
+ "uniform mat4 texMatrix;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = in_pos;\n"
+ " interp_tc = (texMatrix * in_tc).xy;\n"
+ "}\n";
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform samplerExternalOES oesTex;\n"
// Difference in texture coordinate corresponding to one
// sub-pixel in the x direction.
+ "uniform vec2 xUnit;\n"
// Color conversion coefficients, including constant term
+ "uniform vec4 coeffs;\n"
+ "\n"
+ "void main() {\n"
// Since the alpha read from the texture is always 1, this could
// be written as a mat4 x vec4 multiply. However, that seems to
// give a worse framerate, possibly because the additional
// multiplies by 1.0 consume resources. TODO(nisse): Could also
// try to do it as a vec3 x mat3x4, followed by an add in of a
// constant vector.
+ " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+ " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+ " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+ " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+ "}\n";
private int texMatrixLoc;
private int xUnitLoc;
private int coeffsLoc;
YuvConverter(EglBase.Context sharedContext) {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
shader.useProgram();
texMatrixLoc = shader.getUniformLocation("texMatrix");
xUnitLoc = shader.getUniformLocation("xUnit");
coeffsLoc = shader.getUniformLocation("coeffs");
GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
// Initialize vertex shader attributes.
shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
// If the width is not a multiple of 4 pixels, the texture
// will be scaled up slightly and clipped at the right border.
shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
eglBase.detachCurrent();
}
synchronized void convert(ByteBuffer buf,
int width, int height, int stride, int textureId, float [] transformMatrix) {
if (released) {
throw new IllegalStateException(
"YuvConverter.convert called on released object");
}
// We draw into a buffer laid out like
//
// +---------+
// | |
// | Y |
// | |
// | |
// +----+----+
// | U | V |
// | | |
// +----+----+
//
// In memory, we use the same stride for all of Y, U and V. The
// U data starts at offset |height| * |stride| from the Y data,
// and the V data starts at offset |stride/2| from the U
// data, with rows of U and V data alternating.
//
// Now, it would have made sense to allocate a pixel buffer with
// a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
// EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
// unsupported by devices. So do the following hack: Allocate an
// RGBA buffer, of width |stride|/4. To render each of these
// large pixels, sample the texture at 4 different x coordinates
// and store the results in the four components.
//
// Since the V data needs to start on a boundary of such a
// larger pixel, it is not sufficient that |stride| is even, it
// has to be a multiple of 8 pixels.
if (stride % 8 != 0) {
throw new IllegalArgumentException(
"Invalid stride, must be a multiple of 8");
}
if (stride < width) {
throw new IllegalArgumentException(
"Invalid stride, must be >= width");
}
int y_width = (width+3) / 4;
int uv_width = (width+7) / 8;
int uv_height = (height+1)/2;
int total_height = height + uv_height;
int size = stride * total_height;
if (buf.capacity() < size) {
throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
}
// Produce a frame buffer starting at top-left corner, not
// bottom-left.
transformMatrix =
RendererCommon.multiplyMatrices(transformMatrix,
RendererCommon.verticalFlipMatrix());
// Create a new pbuffer surface with the correct size if needed.
if (eglBase.hasSurface()) {
if (eglBase.surfaceWidth() != stride/4 ||
eglBase.surfaceHeight() != total_height){
eglBase.releaseSurface();
eglBase.createPbufferSurface(stride/4, total_height);
}
} else {
eglBase.createPbufferSurface(stride/4, total_height);
}
eglBase.makeCurrent();
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
// Draw Y
GLES20.glViewport(0, 0, y_width, height);
// Matrix * (1;0;0;0) / width. Note that opengl uses column major order.
GLES20.glUniform2f(xUnitLoc,
transformMatrix[0] / width,
transformMatrix[1] / width);
// Y'UV444 to RGB888, see
// https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
// We use the ITU-R coefficients for U and V.
GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Draw U
GLES20.glViewport(0, height, uv_width, uv_height);
// Matrix * (1;0;0;0) / (width / 2). Note that opengl uses column major order.
GLES20.glUniform2f(xUnitLoc,
2.0f * transformMatrix[0] / width,
2.0f * transformMatrix[1] / width);
GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Draw V
GLES20.glViewport(stride/8, height, uv_width, uv_height);
GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
GLES20.GL_UNSIGNED_BYTE, buf);
GlUtil.checkNoGLES2Error("YuvConverter.convert");
// Unbind texture. Reportedly needed on some devices to get
// the texture updated from the camera.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
eglBase.detachCurrent();
}
synchronized void release() {
released = true;
eglBase.makeCurrent();
shader.release();
eglBase.release();
}
}
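// A worked sizing example for the buffer layout documented in convert() above (illustrative
// only; assumes width = 640, height = 480 and stride = 640, which is a multiple of 8):
//   uv_height    = (height + 1) / 2      = 240
//   total_height = height + uv_height    = 720
//   minimum size = stride * total_height = 640 * 720 = 460800 bytes
// so a caller of textureToYUV() would allocate at least
//   ByteBuffer.allocateDirect(stride * (height + (height + 1) / 2)).
// The Y plane occupies the first height * stride bytes, the U plane starts at offset
// height * stride, and the V plane at offset height * stride + stride / 2, with U and V rows
// alternating at the shared stride.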
private final Handler handler;
private final EglBase eglBase;
private final SurfaceTexture surfaceTexture;
private final int oesTextureId;
private YuvConverter yuvConverter;
// These variables are only accessed from the |handler| thread.
private OnTextureFrameAvailableListener listener;
// The possible states of this class.
private boolean hasPendingTexture = false;
private volatile boolean isTextureInUse = false;
private boolean isQuitting = false;
// |pendingListener| is set in startListening() and the runnable is posted to the handler thread.
// startListening() is not allowed to be called again before stopListening(), so this is thread safe.
private OnTextureFrameAvailableListener pendingListener;
final Runnable setListenerRunnable = new Runnable() {
@Override
public void run() {
Logging.d(TAG, "Setting listener to " + pendingListener);
listener = pendingListener;
pendingListener = null;
// May have a pending frame from the previous capture session - drop it.
if (hasPendingTexture) {
// Calling updateTexImage() is necessary in order to receive new frames.
updateTexImage();
hasPendingTexture = false;
}
}
};
private SurfaceTextureHelper(EglBase.Context sharedContext, Handler handler) {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
}
this.handler = handler;
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
try {
// Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
} catch (RuntimeException e) {
// Clean up before rethrowing the exception.
eglBase.release();
handler.getLooper().quit();
throw e;
}
oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
surfaceTexture = new SurfaceTexture(oesTextureId);
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
hasPendingTexture = true;
tryDeliverTextureFrame();
}
});
}
private YuvConverter getYuvConverter() {
// yuvConverter is assigned once
if (yuvConverter != null)
return yuvConverter;
synchronized(this) {
if (yuvConverter == null)
yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
return yuvConverter;
}
}
/**
* Start streaming textures to the given |listener|. If you need to change the listener, call
* stopListening() first.
*/
public void startListening(final OnTextureFrameAvailableListener listener) {
if (this.listener != null || this.pendingListener != null) {
throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
}
this.pendingListener = listener;
handler.post(setListenerRunnable);
}
/**
* Stop listening. The listener set in startListening() is guaranteed to not receive any more
* onTextureFrameAvailable() callbacks after this function returns.
*/
public void stopListening() {
Logging.d(TAG, "stopListening()");
handler.removeCallbacks(setListenerRunnable);
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
@Override
public void run() {
listener = null;
pendingListener = null;
}
});
}
/**
* Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
* producer such as a camera or decoder.
*/
public SurfaceTexture getSurfaceTexture() {
return surfaceTexture;
}
/**
* Retrieve the handler that calls onTextureFrameAvailable(). This handler is valid until
* dispose() is called.
*/
public Handler getHandler() {
return handler;
}
/**
* Call this function to signal that you are done with the frame received in
* onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
* this function in order to receive a new frame.
*/
public void returnTextureFrame() {
handler.post(new Runnable() {
@Override public void run() {
isTextureInUse = false;
if (isQuitting) {
release();
} else {
tryDeliverTextureFrame();
}
}
});
}
public boolean isTextureInUse() {
return isTextureInUse;
}
/**
* Call dispose() to stop receiving frames. OpenGL resources are released and the handler is
* stopped when the texture frame has been returned by a call to returnTextureFrame(). You are
* guaranteed to not receive any more onTextureFrameAvailable() callbacks after this function returns.
*/
public void dispose() {
Logging.d(TAG, "dispose()");
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
@Override
public void run() {
isQuitting = true;
if (!isTextureInUse) {
release();
}
}
});
}
public void textureToYUV(ByteBuffer buf,
int width, int height, int stride, int textureId, float [] transformMatrix) {
if (textureId != oesTextureId)
throw new IllegalStateException("textureToYUV called with unexpected textureId");
getYuvConverter().convert(buf, width, height, stride, textureId, transformMatrix);
}
private void updateTexImage() {
// SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
// as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
// See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
synchronized (EglBase.lock) {
surfaceTexture.updateTexImage();
}
}
private void tryDeliverTextureFrame() {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("Wrong thread.");
}
if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
return;
}
isTextureInUse = true;
hasPendingTexture = false;
updateTexImage();
final float[] transformMatrix = new float[16];
surfaceTexture.getTransformMatrix(transformMatrix);
final long timestampNs = (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
? surfaceTexture.getTimestamp()
: TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
}
private void release() {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("Wrong thread.");
}
if (isTextureInUse || !isQuitting) {
throw new IllegalStateException("Unexpected release.");
}
synchronized (this) {
if (yuvConverter != null)
yuvConverter.release();
}
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
surfaceTexture.release();
eglBase.release();
handler.getLooper().quit();
}
}
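The one-frame-in-flight contract documented above is easiest to see in a short usage sketch. The sketch below is illustrative only and not part of this change: it assumes the application already has an EglBase.Context, and it is placed in the org.webrtc package because SurfaceTextureHelper is package-private.

package org.webrtc;

/** Minimal usage sketch for SurfaceTextureHelper; illustrative only. */
final class SurfaceTextureHelperUsageSketch {
  static void runCaptureSession(EglBase.Context eglContext) {
    // Create the helper; this starts a dedicated thread with a bound EGL context.
    final SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("STHelperThread", eglContext);
    if (helper == null) {
      return; // EGL failed to initialize a pixel buffer surface, see create().
    }
    // Only one frame is in flight at a time: no new onTextureFrameAvailable() call
    // is made until returnTextureFrame() has been called for the previous frame.
    helper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
      @Override
      public void onTextureFrameAvailable(
          int oesTextureId, float[] transformMatrix, long timestampNs) {
        // Consume the OES texture here (draw it, encode it, or convert it with
        // textureToYUV()), then hand it back to receive the next frame.
        helper.returnTextureFrame();
      }
    });
    // A camera or decoder would now render into helper.getSurfaceTexture().
    // Shutdown order: stop callbacks first, then release the EGL resources and thread.
    helper.stopListening();
    helper.dispose();
  }
}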

View File

@ -1,565 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.content.res.Resources.NotFoundException;
import android.graphics.Point;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import org.webrtc.Logging;
import java.util.concurrent.CountDownLatch;
import javax.microedition.khronos.egl.EGLContext;
/**
* Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
* renderFrame() is asynchronous to avoid blocking the calling thread.
* This class is thread safe and handles access from potentially four different threads:
* Interaction from the main app in init, release, setMirror, and setScalingType.
* Interaction from C++ rtc::VideoSinkInterface in renderFrame.
* Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
* Interaction with the layout framework in onMeasure and onSizeChanged.
*/
public class SurfaceViewRenderer extends SurfaceView
implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
private static final String TAG = "SurfaceViewRenderer";
// Dedicated render thread.
private HandlerThread renderThread;
// |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
// on |handlerLock|.
private final Object handlerLock = new Object();
private Handler renderThreadHandler;
// EGL and GL resources for drawing YUV/OES textures. After initialization, these are only accessed
// from the render thread.
private EglBase eglBase;
private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
private RendererCommon.GlDrawer drawer;
// Texture ids for YUV frames. Allocated on first arrival of a YUV frame.
private int[] yuvTextures = null;
// Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
private final Object frameLock = new Object();
private VideoRenderer.I420Frame pendingFrame;
// These variables are synchronized on |layoutLock|.
private final Object layoutLock = new Object();
// These dimension values are used to keep track of the state in these functions: onMeasure(),
// onLayout(), and surfaceChanged(). A new layout is triggered with requestLayout(). This happens
// internally when the incoming frame size changes. requestLayout() can also be triggered
// externally. The layout change is a two pass process: first onMeasure() is called in a top-down
// traversal of the View tree, followed by an onLayout() pass that is also top-down. During the
// onLayout() pass, each parent is responsible for positioning its children using the sizes
// computed in the measure pass.
// |desiredLayoutSize| is the layout size we have requested in onMeasure() and are waiting for it to
// take effect.
private Point desiredLayoutSize = new Point();
// |layoutSize|/|surfaceSize| is the actual current layout/surface size. They are updated in
// onLayout() and surfaceChanged() respectively.
private final Point layoutSize = new Point();
// TODO(magjed): Enable hardware scaler with SurfaceHolder.setFixedSize(). This will decouple
// layout and surface size.
private final Point surfaceSize = new Point();
// |isSurfaceCreated| keeps track of the current status in surfaceCreated()/surfaceDestroyed().
private boolean isSurfaceCreated;
// Last rendered frame dimensions, or 0 if no frame has been rendered yet.
private int frameWidth;
private int frameHeight;
private int frameRotation;
// |scalingType| determines how the video will fill the allowed layout area in onMeasure().
private RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_BALANCED;
// If true, mirrors the video stream horizontally.
private boolean mirror;
// Callback for reporting renderer events.
private RendererCommon.RendererEvents rendererEvents;
// These variables are synchronized on |statisticsLock|.
private final Object statisticsLock = new Object();
// Total number of video frames received in renderFrame() call.
private int framesReceived;
// Number of video frames dropped by renderFrame() because previous frame has not been rendered
// yet.
private int framesDropped;
// Number of rendered video frames.
private int framesRendered;
// Time in ns when the first video frame was rendered.
private long firstFrameTimeNs;
// Time in ns spent in renderFrameOnRenderThread() function.
private long renderTimeNs;
// Runnable for posting frames to render thread.
private final Runnable renderFrameRunnable = new Runnable() {
@Override public void run() {
renderFrameOnRenderThread();
}
};
// Runnable for clearing Surface to black.
private final Runnable makeBlackRunnable = new Runnable() {
@Override public void run() {
makeBlack();
}
};
/**
* Standard View constructor. In order to render something, you must first call init().
*/
public SurfaceViewRenderer(Context context) {
super(context);
getHolder().addCallback(this);
}
/**
* Standard View constructor. In order to render something, you must first call init().
*/
public SurfaceViewRenderer(Context context, AttributeSet attrs) {
super(context, attrs);
getHolder().addCallback(this);
}
/**
* Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
* reinitialize the renderer after a previous init()/release() cycle.
*/
public void init(
EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
}
/**
* Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
* for drawing frames on the EGLSurface. This class is responsible for calling release() on
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
* init()/release() cycle.
*/
public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
int[] configAttributes, RendererCommon.GlDrawer drawer) {
synchronized (handlerLock) {
if (renderThreadHandler != null) {
throw new IllegalStateException(getResourceName() + "Already initialized");
}
Logging.d(TAG, getResourceName() + "Initializing.");
this.rendererEvents = rendererEvents;
this.drawer = drawer;
renderThread = new HandlerThread(TAG);
renderThread.start();
eglBase = EglBase.create(sharedContext, configAttributes);
renderThreadHandler = new Handler(renderThread.getLooper());
}
tryCreateEglSurface();
}
/**
* Create and make an EGLSurface current if both init() and surfaceCreated() have been called.
*/
public void tryCreateEglSurface() {
// |renderThreadHandler| is only created after |eglBase| is created in init(), so the
// following code will only execute if eglBase != null.
runOnRenderThread(new Runnable() {
@Override public void run() {
synchronized (layoutLock) {
if (isSurfaceCreated && !eglBase.hasSurface()) {
eglBase.createSurface(getHolder().getSurface());
eglBase.makeCurrent();
// Necessary for YUV frames with odd width.
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
}
}
}
});
}
/**
* Block until any pending frame is returned and all GL resources released, even if an interrupt
* occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
* should be called before the Activity is destroyed, while the EGLContext is still valid. If you
* don't call this function, the GL resources might leak.
*/
public void release() {
final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
synchronized (handlerLock) {
if (renderThreadHandler == null) {
Logging.d(TAG, getResourceName() + "Already released");
return;
}
// Release EGL and GL resources on render thread.
// TODO(magjed): This might not be necessary - all OpenGL resources are automatically deleted
// when the EGL context is lost. It might be dangerous to delete them manually in
// Activity.onDestroy().
renderThreadHandler.postAtFrontOfQueue(new Runnable() {
@Override public void run() {
drawer.release();
drawer = null;
if (yuvTextures != null) {
GLES20.glDeleteTextures(3, yuvTextures, 0);
yuvTextures = null;
}
// Clear last rendered image to black.
makeBlack();
eglBase.release();
eglBase = null;
eglCleanupBarrier.countDown();
}
});
// Don't accept any more frames or messages to the render thread.
renderThreadHandler = null;
}
// Make sure the EGL/GL cleanup posted above is executed.
ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
renderThread.quit();
synchronized (frameLock) {
if (pendingFrame != null) {
VideoRenderer.renderFrameDone(pendingFrame);
pendingFrame = null;
}
}
// The |renderThread| cleanup is not safe to cancel and we need to wait until it's done.
ThreadUtils.joinUninterruptibly(renderThread);
renderThread = null;
// Reset statistics and event reporting.
synchronized (layoutLock) {
frameWidth = 0;
frameHeight = 0;
frameRotation = 0;
rendererEvents = null;
}
resetStatistics();
}
/**
* Reset statistics. This will reset the logged statistics in logStatistics(), and
* RendererEvents.onFirstFrameRendered() will be called for the next frame.
*/
public void resetStatistics() {
synchronized (statisticsLock) {
framesReceived = 0;
framesDropped = 0;
framesRendered = 0;
firstFrameTimeNs = 0;
renderTimeNs = 0;
}
}
/**
* Set if the video stream should be mirrored or not.
*/
public void setMirror(final boolean mirror) {
synchronized (layoutLock) {
this.mirror = mirror;
}
}
/**
* Set how the video will fill the allowed layout area.
*/
public void setScalingType(RendererCommon.ScalingType scalingType) {
synchronized (layoutLock) {
this.scalingType = scalingType;
}
}
// VideoRenderer.Callbacks interface.
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
synchronized (statisticsLock) {
++framesReceived;
}
synchronized (handlerLock) {
if (renderThreadHandler == null) {
Logging.d(TAG, getResourceName()
+ "Dropping frame - Not initialized or already released.");
VideoRenderer.renderFrameDone(frame);
return;
}
synchronized (frameLock) {
if (pendingFrame != null) {
// Drop old frame.
synchronized (statisticsLock) {
++framesDropped;
}
VideoRenderer.renderFrameDone(pendingFrame);
}
pendingFrame = frame;
updateFrameDimensionsAndReportEvents(frame);
renderThreadHandler.post(renderFrameRunnable);
}
}
}
// Returns desired layout size given current measure specification and video aspect ratio.
private Point getDesiredLayoutSize(int widthSpec, int heightSpec) {
synchronized (layoutLock) {
final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec);
final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec);
final Point size =
RendererCommon.getDisplaySize(scalingType, frameAspectRatio(), maxWidth, maxHeight);
if (MeasureSpec.getMode(widthSpec) == MeasureSpec.EXACTLY) {
size.x = maxWidth;
}
if (MeasureSpec.getMode(heightSpec) == MeasureSpec.EXACTLY) {
size.y = maxHeight;
}
return size;
}
}
// View layout interface.
@Override
protected void onMeasure(int widthSpec, int heightSpec) {
synchronized (layoutLock) {
if (frameWidth == 0 || frameHeight == 0) {
super.onMeasure(widthSpec, heightSpec);
return;
}
desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
if (desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight()) {
// Clear the surface as soon as possible before the layout change to avoid stretched video and
// other render artifacts. Don't wait for it to finish because the IO thread should never be
// blocked, so it's a best-effort attempt.
synchronized (handlerLock) {
if (renderThreadHandler != null) {
renderThreadHandler.postAtFrontOfQueue(makeBlackRunnable);
}
}
}
setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
synchronized (layoutLock) {
layoutSize.x = right - left;
layoutSize.y = bottom - top;
}
// Might have a pending frame waiting for a layout of correct size.
runOnRenderThread(renderFrameRunnable);
}
// SurfaceHolder.Callback interface.
@Override
public void surfaceCreated(final SurfaceHolder holder) {
Logging.d(TAG, getResourceName() + "Surface created.");
synchronized (layoutLock) {
isSurfaceCreated = true;
}
tryCreateEglSurface();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Logging.d(TAG, getResourceName() + "Surface destroyed.");
synchronized (layoutLock) {
isSurfaceCreated = false;
surfaceSize.x = 0;
surfaceSize.y = 0;
}
runOnRenderThread(new Runnable() {
@Override public void run() {
eglBase.releaseSurface();
}
});
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Logging.d(TAG, getResourceName() + "Surface changed: " + width + "x" + height);
synchronized (layoutLock) {
surfaceSize.x = width;
surfaceSize.y = height;
}
// Might have a pending frame waiting for a surface of correct size.
runOnRenderThread(renderFrameRunnable);
}
/**
* Private helper function to post tasks safely.
*/
private void runOnRenderThread(Runnable runnable) {
synchronized (handlerLock) {
if (renderThreadHandler != null) {
renderThreadHandler.post(runnable);
}
}
}
private String getResourceName() {
try {
return getResources().getResourceEntryName(getId()) + ": ";
} catch (NotFoundException e) {
return "";
}
}
private void makeBlack() {
if (Thread.currentThread() != renderThread) {
throw new IllegalStateException(getResourceName() + "Wrong thread.");
}
if (eglBase != null && eglBase.hasSurface()) {
GLES20.glClearColor(0, 0, 0, 0);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
eglBase.swapBuffers();
}
}
/**
* Returns true if the layout and surface size are consistent; returns false while a layout change is in progress.
*/
private boolean checkConsistentLayout() {
if (Thread.currentThread() != renderThread) {
throw new IllegalStateException(getResourceName() + "Wrong thread.");
}
synchronized (layoutLock) {
// Return false while we are in the middle of a layout change.
return layoutSize.equals(desiredLayoutSize) && surfaceSize.equals(layoutSize);
}
}
/**
* Renders and releases |pendingFrame|.
*/
private void renderFrameOnRenderThread() {
if (Thread.currentThread() != renderThread) {
throw new IllegalStateException(getResourceName() + "Wrong thread.");
}
// Fetch and render |pendingFrame|.
final VideoRenderer.I420Frame frame;
synchronized (frameLock) {
if (pendingFrame == null) {
return;
}
frame = pendingFrame;
pendingFrame = null;
}
if (eglBase == null || !eglBase.hasSurface()) {
Logging.d(TAG, getResourceName() + "No surface to draw on");
VideoRenderer.renderFrameDone(frame);
return;
}
if (!checkConsistentLayout()) {
// Output intermediate black frames while the layout is updated.
makeBlack();
VideoRenderer.renderFrameDone(frame);
return;
}
// After a surface size change, the EGLSurface might still have a buffer of the old size in the
// pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet
// changed. Such a buffer will be rendered incorrectly, so flush it with a black frame.
synchronized (layoutLock) {
if (eglBase.surfaceWidth() != surfaceSize.x || eglBase.surfaceHeight() != surfaceSize.y) {
makeBlack();
}
}
final long startTimeNs = System.nanoTime();
final float[] texMatrix;
synchronized (layoutLock) {
final float[] rotatedSamplingMatrix =
RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
mirror, frameAspectRatio(), (float) layoutSize.x / layoutSize.y);
texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
}
// TODO(magjed): glClear() shouldn't be necessary since every pixel is covered anyway, but it's
// a workaround for bug 5147. Performance will be slightly worse.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if (frame.yuvFrame) {
// Make sure YUV textures are allocated.
if (yuvTextures == null) {
yuvTextures = new int[3];
for (int i = 0; i < 3; i++) {
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
}
yuvUploader.uploadYuvData(
yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
0, 0, surfaceSize.x, surfaceSize.y);
} else {
drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
0, 0, surfaceSize.x, surfaceSize.y);
}
eglBase.swapBuffers();
VideoRenderer.renderFrameDone(frame);
synchronized (statisticsLock) {
if (framesRendered == 0) {
firstFrameTimeNs = startTimeNs;
synchronized (layoutLock) {
Logging.d(TAG, getResourceName() + "Reporting first rendered frame.");
if (rendererEvents != null) {
rendererEvents.onFirstFrameRendered();
}
}
}
++framesRendered;
renderTimeNs += (System.nanoTime() - startTimeNs);
if (framesRendered % 300 == 0) {
logStatistics();
}
}
}
// Return current frame aspect ratio, taking rotation into account.
private float frameAspectRatio() {
synchronized (layoutLock) {
if (frameWidth == 0 || frameHeight == 0) {
return 0.0f;
}
return (frameRotation % 180 == 0) ? (float) frameWidth / frameHeight
: (float) frameHeight / frameWidth;
}
}
// Update frame dimensions and report any changes to |rendererEvents|.
private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
synchronized (layoutLock) {
if (frameWidth != frame.width || frameHeight != frame.height
|| frameRotation != frame.rotationDegree) {
Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
+ frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
if (rendererEvents != null) {
rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
}
frameWidth = frame.width;
frameHeight = frame.height;
frameRotation = frame.rotationDegree;
post(new Runnable() {
@Override public void run() {
requestLayout();
}
});
}
}
}
private void logStatistics() {
synchronized (statisticsLock) {
Logging.d(TAG, getResourceName() + "Frames received: "
+ framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
" ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, getResourceName() + "Average render time: "
+ (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
}
}
}
}
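For reference, here is a minimal setup/teardown sketch for the renderer above; it is illustrative only and not part of this change. It assumes an application Context and a shared EglBase.Context, uses the null-RendererEvents path that init() allows, and picks SCALE_ASPECT_FIT from RendererCommon's scaling types, which are not shown in this listing.

package org.webrtc;

import android.content.Context;

/** Minimal usage sketch for SurfaceViewRenderer; illustrative only. */
final class SurfaceViewRendererUsageSketch {
  static SurfaceViewRenderer createRenderer(Context appContext, EglBase.Context eglContext) {
    // In a real app the view usually comes from an XML layout; constructing it here
    // keeps the sketch self-contained.
    SurfaceViewRenderer renderer = new SurfaceViewRenderer(appContext);
    // init() must be called before any frame can be rendered. Passing null for
    // RendererEvents is allowed; event reporting is simply skipped.
    renderer.init(eglContext, null);
    renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    renderer.setMirror(true); // Typical for a local front-camera preview.
    // The view implements VideoRenderer.Callbacks, so frames can be pushed to
    // renderFrame() directly, or the view can be wrapped in a VideoRenderer and
    // attached to a video track elsewhere in the application.
    return renderer;
  }

  static void tearDown(SurfaceViewRenderer renderer) {
    // release() blocks until any pending frame is returned and GL resources are
    // freed; call it while the EGLContext is still valid, e.g. in Activity.onDestroy().
    renderer.release();
  }
}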

View File

@ -1,113 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import java.util.List;
// Base interface for all VideoCapturers to implement.
public interface VideoCapturer {
// Interface used for providing callbacks to an observer.
public interface CapturerObserver {
// Notify whether the camera has been started successfully or not.
// Called on a Java thread owned by VideoCapturer.
void onCapturerStarted(boolean success);
// Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
long timeStamp);
// Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
// owned by VideoCapturer.
void onTextureFrameCaptured(
int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
long timestamp);
// Requests an output format from the video capturer. Captured frames
// by the camera will be scaled and/or dropped by the video capturer.
// Called on a Java thread owned by VideoCapturer.
void onOutputFormatRequest(int width, int height, int framerate);
}
// An implementation of CapturerObserver that forwards all calls from
// Java to the C layer.
static class NativeObserver implements CapturerObserver {
private final long nativeCapturer;
public NativeObserver(long nativeCapturer) {
this.nativeCapturer = nativeCapturer;
}
@Override
public void onCapturerStarted(boolean success) {
nativeCapturerStarted(nativeCapturer, success);
}
@Override
public void onByteBufferFrameCaptured(byte[] data, int width, int height,
int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
timeStamp);
}
@Override
public void onTextureFrameCaptured(
int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
long timestamp) {
nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
rotation, timestamp);
}
@Override
public void onOutputFormatRequest(int width, int height, int framerate) {
nativeOnOutputFormatRequest(nativeCapturer, width, height, framerate);
}
private native void nativeCapturerStarted(long nativeCapturer,
boolean success);
private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
byte[] data, int length, int width, int height, int rotation, long timeStamp);
private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
private native void nativeOnOutputFormatRequest(long nativeCapturer,
int width, int height, int framerate);
}
/**
* Returns a list with all the formats this VideoCapturer supports.
*/
List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats();
/**
* Start capturing frames in a format that is as close as possible to |width| x |height| and
* |framerate|. If the VideoCapturer wants to deliver texture frames, it should do this by
* rendering on the SurfaceTexture in |surfaceTextureHelper|, registering itself as a listener,
* and forwarding the texture frames to CapturerObserver.onTextureFrameCaptured().
*/
void startCapture(
int width, int height, int framerate, SurfaceTextureHelper surfaceTextureHelper,
Context applicationContext, CapturerObserver frameObserver);
/**
* Stop capturing. This function should block until capture is actually stopped.
*/
void stopCapture() throws InterruptedException;
void onOutputFormatRequest(int width, int height, int framerate);
void changeCaptureFormat(int width, int height, int framerate);
/**
* Perform any final cleanup here. No more capturing will be done after this call.
*/
void dispose();
}
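To make the capture contract concrete, here is a small driver sketch; it is illustrative only and not part of this change. The LoggingObserver is a stand-in for the NativeObserver that the native layer normally supplies, the SurfaceTextureHelper and Context are assumed to come from the application, and the 640x480 at 30 fps request is arbitrary.

package org.webrtc;

import android.content.Context;

/** Minimal driver sketch for VideoCapturer; illustrative only. */
final class VideoCapturerUsageSketch {
  /** A stand-in observer that logs deliveries and keeps texture frames flowing. */
  static class LoggingObserver implements VideoCapturer.CapturerObserver {
    private final SurfaceTextureHelper helper;

    LoggingObserver(SurfaceTextureHelper helper) {
      this.helper = helper;
    }
    @Override
    public void onCapturerStarted(boolean success) {
      Logging.d("CapturerSketch", "Capturer started: " + success);
    }
    @Override
    public void onByteBufferFrameCaptured(
        byte[] data, int width, int height, int rotation, long timeStamp) {
      Logging.d("CapturerSketch", "Byte buffer frame: " + width + "x" + height);
    }
    @Override
    public void onTextureFrameCaptured(int width, int height, int oesTextureId,
        float[] transformMatrix, int rotation, long timestamp) {
      Logging.d("CapturerSketch", "Texture frame: " + width + "x" + height);
      // Texture frames must be returned, or no further texture frames arrive.
      helper.returnTextureFrame();
    }
    @Override
    public void onOutputFormatRequest(int width, int height, int framerate) {
      Logging.d("CapturerSketch", "Output format request: " + width + "x" + height);
    }
  }

  static void runCapture(VideoCapturer capturer, SurfaceTextureHelper helper, Context appContext)
      throws InterruptedException {
    // Request VGA at 30 fps; the capturer picks the closest supported format.
    capturer.startCapture(640, 480, 30, helper, appContext, new LoggingObserver(helper));
    // ... frames are delivered on a thread owned by the capturer ...
    capturer.stopCapture(); // Blocks until capture has actually stopped.
    capturer.dispose();
  }
}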

View File

@ -1,672 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import android.content.Context;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;
import android.view.WindowManager;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
// Android specific implementation of VideoCapturer.
// An instance of this class can be created by an application using
// VideoCapturerAndroid.create();
// This class extends VideoCapturer with a method to easily switch between the
// front and back camera. It also provides methods for enumerating valid device
// names.
//
// Threading notes: this class is called from C++ code, Android Camera callbacks, and possibly
// arbitrary Java threads. All public entry points are thread safe, and delegate the work to the
// camera thread. The internal *OnCameraThread() methods must check |camera| for null to check if
// the camera has been stopped.
// TODO(magjed): This class name is now confusing - rename to Camera1VideoCapturer.
@SuppressWarnings("deprecation")
public class VideoCapturerAndroid implements
CameraVideoCapturer,
android.hardware.Camera.PreviewCallback,
SurfaceTextureHelper.OnTextureFrameAvailableListener {
private static final String TAG = "VideoCapturerAndroid";
private static final int CAMERA_STOP_TIMEOUT_MS = 7000;
private android.hardware.Camera camera; // Only non-null while capturing.
private final Object handlerLock = new Object();
// |cameraThreadHandler| must be synchronized on |handlerLock| when not on the camera thread,
// or when modifying the reference. Use maybePostOnCameraThread() instead of posting directly to
// the handler - this way all callbacks with a specified token can be removed at once.
private Handler cameraThreadHandler;
private Context applicationContext;
// Synchronization lock for |id|.
private final Object cameraIdLock = new Object();
private int id;
private android.hardware.Camera.CameraInfo info;
private CameraStatistics cameraStatistics;
// Remember the requested format in case we want to switch cameras.
private int requestedWidth;
private int requestedHeight;
private int requestedFramerate;
// The capture format will be the closest supported format to the requested format.
private CaptureFormat captureFormat;
private final Object pendingCameraSwitchLock = new Object();
private volatile boolean pendingCameraSwitch;
private CapturerObserver frameObserver = null;
private final CameraEventsHandler eventsHandler;
private boolean firstFrameReported;
// Arbitrary queue depth. Higher number means more memory allocated & held,
// lower number means more sensitivity to processing time in the client (and
// potentially stalling the capturer if it runs out of buffers to write to).
private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
private final boolean isCapturingToTexture;
private SurfaceTextureHelper surfaceHelper;
private static final int MAX_OPEN_CAMERA_ATTEMPTS = 3;
private static final int OPEN_CAMERA_DELAY_MS = 500;
private int openCameraAttempts;
// Camera error callback.
private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
new android.hardware.Camera.ErrorCallback() {
@Override
public void onError(int error, android.hardware.Camera camera) {
String errorMessage;
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
errorMessage = "Camera server died!";
} else {
errorMessage = "Camera error: " + error;
}
Logging.e(TAG, errorMessage);
if (eventsHandler != null) {
eventsHandler.onCameraError(errorMessage);
}
}
};
public static VideoCapturerAndroid create(String name,
CameraEventsHandler eventsHandler) {
return VideoCapturerAndroid.create(name, eventsHandler, false /* captureToTexture */);
}
// Use ctor directly instead.
@Deprecated
public static VideoCapturerAndroid create(String name,
CameraEventsHandler eventsHandler, boolean captureToTexture) {
try {
return new VideoCapturerAndroid(name, eventsHandler, captureToTexture);
} catch (RuntimeException e) {
Logging.e(TAG, "Couldn't create camera.", e);
return null;
}
}
public void printStackTrace() {
Thread cameraThread = null;
synchronized (handlerLock) {
if (cameraThreadHandler != null) {
cameraThread = cameraThreadHandler.getLooper().getThread();
}
}
if (cameraThread != null) {
StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
if (cameraStackTraces.length > 0) {
Logging.d(TAG, "VideoCapturerAndroid stack trace:");
for (StackTraceElement stackTrace : cameraStackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
// Switch camera to the next valid camera id. This can only be called while
// the camera is running.
@Override
public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
if (android.hardware.Camera.getNumberOfCameras() < 2) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("No camera to switch to.");
}
return;
}
synchronized (pendingCameraSwitchLock) {
if (pendingCameraSwitch) {
// Do not handle multiple camera switch requests to avoid blocking the
// camera thread by handling too many switch requests from a queue.
Logging.w(TAG, "Ignoring camera switch request.");
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("Pending camera switch already in progress.");
}
return;
}
pendingCameraSwitch = true;
}
final boolean didPost = maybePostOnCameraThread(new Runnable() {
@Override
public void run() {
switchCameraOnCameraThread();
synchronized (pendingCameraSwitchLock) {
pendingCameraSwitch = false;
}
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchDone(
info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
}
}
});
if (!didPost && switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("Camera is stopped.");
}
}
// Requests a new output format from the video capturer. Captured frames
// by the camera will be scaled and/or dropped by the video capturer.
// It does not matter if width and height are flipped, i.e. |width| = 640, |height| = 480 produces
// the same result as |width| = 480, |height| = 640.
// TODO(magjed/perkj): Document what this function does. Change name?
@Override
public void onOutputFormatRequest(final int width, final int height, final int framerate) {
maybePostOnCameraThread(new Runnable() {
@Override public void run() {
onOutputFormatRequestOnCameraThread(width, height, framerate);
}
});
}
// Reconfigure the camera to capture in a new format. This should only be called while the camera
// is running.
@Override
public void changeCaptureFormat(final int width, final int height, final int framerate) {
maybePostOnCameraThread(new Runnable() {
@Override public void run() {
startPreviewOnCameraThread(width, height, framerate);
}
});
}
// Helper function to retrieve the current camera id synchronously. Note that the camera id might
// change at any point due to switchCamera() calls.
private int getCurrentCameraId() {
synchronized (cameraIdLock) {
return id;
}
}
@Override
public List<CaptureFormat> getSupportedFormats() {
return Camera1Enumerator.getSupportedFormats(getCurrentCameraId());
}
// Returns true if this VideoCapturer is set up to capture video frames to a SurfaceTexture.
public boolean isCapturingToTexture() {
return isCapturingToTexture;
}
public VideoCapturerAndroid(String cameraName, CameraEventsHandler eventsHandler,
boolean captureToTexture) {
if (android.hardware.Camera.getNumberOfCameras() == 0) {
throw new RuntimeException("No cameras available");
}
if (cameraName == null || cameraName.equals("")) {
this.id = 0;
} else {
this.id = Camera1Enumerator.getCameraIndex(cameraName);
}
this.eventsHandler = eventsHandler;
isCapturingToTexture = captureToTexture;
Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
}
private void checkIsOnCameraThread() {
synchronized (handlerLock) {
if (cameraThreadHandler == null) {
Logging.e(TAG, "Camera is stopped - can't check thread.");
} else if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
}
private boolean maybePostOnCameraThread(Runnable runnable) {
return maybePostDelayedOnCameraThread(0 /* delayMs */, runnable);
}
private boolean maybePostDelayedOnCameraThread(int delayMs, Runnable runnable) {
synchronized (handlerLock) {
return cameraThreadHandler != null
&& cameraThreadHandler.postAtTime(
runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
}
}
@Override
public void dispose() {
Logging.d(TAG, "dispose");
}
// Note that this actually opens the camera, and Camera callbacks run on the
// thread that calls open(), so this is done on the CameraThread.
@Override
public void startCapture(
final int width, final int height, final int framerate,
final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
final CapturerObserver frameObserver) {
Logging.d(TAG, "startCapture requested: " + width + "x" + height + "@" + framerate);
if (surfaceTextureHelper == null) {
frameObserver.onCapturerStarted(false /* success */);
if (eventsHandler != null) {
eventsHandler.onCameraError("No SurfaceTexture created.");
}
return;
}
if (applicationContext == null) {
throw new IllegalArgumentException("applicationContext not set.");
}
if (frameObserver == null) {
throw new IllegalArgumentException("frameObserver not set.");
}
synchronized (handlerLock) {
if (this.cameraThreadHandler != null) {
throw new RuntimeException("Camera has already been started.");
}
this.cameraThreadHandler = surfaceTextureHelper.getHandler();
this.surfaceHelper = surfaceTextureHelper;
final boolean didPost = maybePostOnCameraThread(new Runnable() {
@Override
public void run() {
openCameraAttempts = 0;
startCaptureOnCameraThread(width, height, framerate, frameObserver,
applicationContext);
}
});
if (!didPost) {
frameObserver.onCapturerStarted(false);
if (eventsHandler != null) {
eventsHandler.onCameraError("Could not post task to camera thread.");
}
}
}
}
private void startCaptureOnCameraThread(
final int width, final int height, final int framerate, final CapturerObserver frameObserver,
final Context applicationContext) {
synchronized (handlerLock) {
if (cameraThreadHandler == null) {
Logging.e(TAG, "startCaptureOnCameraThread: Camera is stopped");
return;
} else {
checkIsOnCameraThread();
}
}
if (camera != null) {
Logging.e(TAG, "startCaptureOnCameraThread: Camera has already been started.");
return;
}
this.applicationContext = applicationContext;
this.frameObserver = frameObserver;
this.firstFrameReported = false;
try {
try {
synchronized (cameraIdLock) {
Logging.d(TAG, "Opening camera " + id);
if (eventsHandler != null) {
eventsHandler.onCameraOpening(id);
}
camera = android.hardware.Camera.open(id);
info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(id, info);
}
} catch (RuntimeException e) {
openCameraAttempts++;
if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
Logging.e(TAG, "Camera.open failed, retrying", e);
maybePostDelayedOnCameraThread(OPEN_CAMERA_DELAY_MS, new Runnable() {
@Override public void run() {
startCaptureOnCameraThread(width, height, framerate, frameObserver,
applicationContext);
}
});
return;
}
throw e;
}
camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
Logging.d(TAG, "Camera orientation: " + info.orientation +
". Device orientation: " + getDeviceOrientation());
camera.setErrorCallback(cameraErrorCallback);
startPreviewOnCameraThread(width, height, framerate);
frameObserver.onCapturerStarted(true);
if (isCapturingToTexture) {
surfaceHelper.startListening(this);
}
// Start camera observer.
cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
} catch (IOException|RuntimeException e) {
Logging.e(TAG, "startCapture failed", e);
// Make sure the camera is released.
stopCaptureOnCameraThread(true /* stopHandler */);
frameObserver.onCapturerStarted(false);
if (eventsHandler != null) {
eventsHandler.onCameraError("Camera can not be started.");
}
}
}
// (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
private void startPreviewOnCameraThread(int width, int height, int framerate) {
synchronized (handlerLock) {
if (cameraThreadHandler == null || camera == null) {
Logging.e(TAG, "startPreviewOnCameraThread: Camera is stopped");
return;
} else {
checkIsOnCameraThread();
}
}
Logging.d(
TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
requestedWidth = width;
requestedHeight = height;
requestedFramerate = framerate;
// Find closest supported format for |width| x |height| @ |framerate|.
final android.hardware.Camera.Parameters parameters = camera.getParameters();
final List<CaptureFormat.FramerateRange> supportedFramerates =
Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
final CaptureFormat.FramerateRange fpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
final CaptureFormat captureFormat =
new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
// If we are already using this capture format, we don't need to do anything.
if (captureFormat.equals(this.captureFormat)) {
return;
}
// Update camera parameters.
Logging.d(TAG, "isVideoStabilizationSupported: " +
parameters.isVideoStabilizationSupported());
if (parameters.isVideoStabilizationSupported()) {
parameters.setVideoStabilization(true);
}
// Note: setRecordingHint(true) actually decreases the frame rate on Nexus 5.
// parameters.setRecordingHint(true);
if (captureFormat.framerate.max > 0) {
parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
}
parameters.setPreviewSize(previewSize.width, previewSize.height);
if (!isCapturingToTexture) {
parameters.setPreviewFormat(captureFormat.imageFormat);
}
// Picture size is for taking pictures and not for preview/video, but we need to set it anyway
// as a workaround for an aspect ratio problem on Nexus 7.
final Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
parameters.setPictureSize(pictureSize.width, pictureSize.height);
// Temporarily stop preview if it's already running.
if (this.captureFormat != null) {
camera.stopPreview();
// Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
// queue, but sometimes we receive a frame with the old resolution after this call anyway.
camera.setPreviewCallbackWithBuffer(null);
}
// (Re)start preview.
Logging.d(TAG, "Start capturing: " + captureFormat);
this.captureFormat = captureFormat;
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
camera.setParameters(parameters);
// Calculate orientation manually and send it as CVO instead.
camera.setDisplayOrientation(0 /* degrees */);
if (!isCapturingToTexture) {
queuedBuffers.clear();
final int frameSize = captureFormat.frameSize();
for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
queuedBuffers.add(buffer.array());
camera.addCallbackBuffer(buffer.array());
}
camera.setPreviewCallbackWithBuffer(this);
}
camera.startPreview();
}
// Blocks until camera is known to be stopped.
@Override
public void stopCapture() throws InterruptedException {
Logging.d(TAG, "stopCapture");
final CountDownLatch barrier = new CountDownLatch(1);
final boolean didPost = maybePostOnCameraThread(new Runnable() {
@Override public void run() {
stopCaptureOnCameraThread(true /* stopHandler */);
barrier.countDown();
}
});
if (!didPost) {
Logging.e(TAG, "Calling stopCapture() for an already stopped camera.");
return;
}
if (!barrier.await(CAMERA_STOP_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
Logging.e(TAG, "Camera stop timeout");
printStackTrace();
if (eventsHandler != null) {
eventsHandler.onCameraError("Camera stop timeout");
}
}
Logging.d(TAG, "stopCapture done");
}
private void stopCaptureOnCameraThread(boolean stopHandler) {
synchronized (handlerLock) {
if (cameraThreadHandler == null) {
Logging.e(TAG, "stopCaptureOnCameraThread: Camera is stopped");
} else {
checkIsOnCameraThread();
}
}
Logging.d(TAG, "stopCaptureOnCameraThread");
// Note that the camera might still not be started here if startCaptureOnCameraThread failed
// and we posted a retry.
// Make sure onTextureFrameAvailable() is not called anymore.
if (surfaceHelper != null) {
surfaceHelper.stopListening();
}
if (stopHandler) {
synchronized (handlerLock) {
// Clear the cameraThreadHandler first, in case stopPreview or
// other driver code deadlocks. Deadlock in
// android.hardware.Camera._stopPreview(Native Method) has
// been observed on Nexus 5 (hammerhead), OS version LMY48I.
// The camera might post another one or two preview frames
// before it is stopped, so we have to check for a null
// cameraThreadHandler in our handler. Remove all pending
// Runnables posted from |this|.
if (cameraThreadHandler != null) {
cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
cameraThreadHandler = null;
}
surfaceHelper = null;
}
}
if (cameraStatistics != null) {
cameraStatistics.release();
cameraStatistics = null;
}
Logging.d(TAG, "Stop preview.");
if (camera != null) {
camera.stopPreview();
camera.setPreviewCallbackWithBuffer(null);
}
queuedBuffers.clear();
captureFormat = null;
Logging.d(TAG, "Release camera.");
if (camera != null) {
camera.release();
camera = null;
}
if (eventsHandler != null) {
eventsHandler.onCameraClosed();
}
Logging.d(TAG, "stopCaptureOnCameraThread done");
}
private void switchCameraOnCameraThread() {
synchronized (handlerLock) {
if (cameraThreadHandler == null) {
Logging.e(TAG, "switchCameraOnCameraThread: Camera is stopped");
return;
} else {
checkIsOnCameraThread();
}
}
Logging.d(TAG, "switchCameraOnCameraThread");
stopCaptureOnCameraThread(false /* stopHandler */);
synchronized (cameraIdLock) {
id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
}
startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
applicationContext);
Logging.d(TAG, "switchCameraOnCameraThread done");
}
private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
synchronized (handlerLock) {
if (cameraThreadHandler == null || camera == null) {
Logging.e(TAG, "onOutputFormatRequestOnCameraThread: Camera is stopped");
return;
} else {
checkIsOnCameraThread();
}
}
Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
"@" + framerate);
frameObserver.onOutputFormatRequest(width, height, framerate);
}
private int getDeviceOrientation() {
int orientation = 0;
WindowManager wm = (WindowManager) applicationContext.getSystemService(
Context.WINDOW_SERVICE);
switch(wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;
case Surface.ROTATION_180:
orientation = 180;
break;
case Surface.ROTATION_270:
orientation = 270;
break;
case Surface.ROTATION_0:
default:
orientation = 0;
break;
}
return orientation;
}
private int getFrameOrientation() {
int rotation = getDeviceOrientation();
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
rotation = 360 - rotation;
}
return (info.orientation + rotation) % 360;
}
// Called on the camera thread, so it must not be "synchronized".
@Override
public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
synchronized (handlerLock) {
if (cameraThreadHandler == null) {
Logging.e(TAG, "onPreviewFrame: Camera is stopped");
return;
} else {
checkIsOnCameraThread();
}
}
if (!queuedBuffers.contains(data)) {
// |data| is an old invalid buffer.
return;
}
if (camera != callbackCamera) {
throw new RuntimeException("Unexpected camera in callback!");
}
final long captureTimeNs =
TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
if (eventsHandler != null && !firstFrameReported) {
eventsHandler.onFirstFrameAvailable();
firstFrameReported = true;
}
cameraStatistics.addFrame();
frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
getFrameOrientation(), captureTimeNs);
camera.addCallbackBuffer(data);
}
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
synchronized (handlerLock) {
if (cameraThreadHandler == null) {
Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");
surfaceHelper.returnTextureFrame();
return;
} else {
checkIsOnCameraThread();
}
}
if (eventsHandler != null && !firstFrameReported) {
eventsHandler.onFirstFrameAvailable();
firstFrameReported = true;
}
int rotation = getFrameOrientation();
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
transformMatrix =
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
}
cameraStatistics.addFrame();
frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
transformMatrix, rotation, timestampNs);
}
}
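
For reference, a minimal standalone sketch (not part of the original file) of the orientation arithmetic in getDeviceOrientation()/getFrameOrientation() above. The sensor orientations and display rotations used in main() are hypothetical example values.

public class FrameOrientationExample {
  // Mirrors getFrameOrientation(): back-facing cameras compensate in the opposite
  // direction of the display rotation before adding the sensor mounting orientation.
  static int frameOrientation(int sensorOrientation, int displayRotation, boolean frontFacing) {
    int rotation = displayRotation;
    if (!frontFacing) {
      rotation = 360 - rotation;
    }
    return (sensorOrientation + rotation) % 360;
  }

  public static void main(String[] args) {
    // Back camera mounted at 90 degrees, device rotated to ROTATION_90 (landscape).
    System.out.println(frameOrientation(90, 90, false));  // (90 + 270) % 360 = 0
    // Front camera mounted at 270 degrees, device at ROTATION_0 (portrait).
    System.out.println(frameOrientation(270, 0, true));   // (270 + 0) % 360 = 270
  }
}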

View File

@ -1,149 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/**
 * Java version of VideoSinkInterface. Allows clients to define their own
 * rendering behavior by passing in a Callbacks object. A GUI-rendering window
 * can be created separately via VideoRendererGui.createGui().
*/
public class VideoRenderer {
/**
* Java version of cricket::VideoFrame. Frames are only constructed from native code and test
* code.
*/
public static class I420Frame {
public final int width;
public final int height;
public final int[] yuvStrides;
public ByteBuffer[] yuvPlanes;
public final boolean yuvFrame;
// Matrix that transforms standard coordinates to their proper sampling locations in
// the texture. This transform compensates for any properties of the video source that
// cause it to appear different from a normalized texture. This matrix does not take
// |rotationDegree| into account.
public final float[] samplingMatrix;
public int textureId;
// Frame pointer in C++.
private long nativeFramePointer;
    // rotationDegree is the number of degrees the frame must be rotated clockwise
// to be rendered correctly.
public int rotationDegree;
/**
* Construct a frame of the given dimensions with the specified planar data.
*/
I420Frame(int width, int height, int rotationDegree, int[] yuvStrides, ByteBuffer[] yuvPlanes,
long nativeFramePointer) {
this.width = width;
this.height = height;
this.yuvStrides = yuvStrides;
this.yuvPlanes = yuvPlanes;
this.yuvFrame = true;
this.rotationDegree = rotationDegree;
this.nativeFramePointer = nativeFramePointer;
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
// bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
// matrix.
samplingMatrix = new float[] {
1, 0, 0, 0,
0, -1, 0, 0,
0, 0, 1, 0,
0, 1, 0, 1};
}
/**
* Construct a texture frame of the given dimensions with data in SurfaceTexture
*/
I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix,
long nativeFramePointer) {
this.width = width;
this.height = height;
this.yuvStrides = null;
this.yuvPlanes = null;
this.samplingMatrix = samplingMatrix;
this.textureId = textureId;
this.yuvFrame = false;
this.rotationDegree = rotationDegree;
this.nativeFramePointer = nativeFramePointer;
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
}
public int rotatedWidth() {
return (rotationDegree % 180 == 0) ? width : height;
}
public int rotatedHeight() {
return (rotationDegree % 180 == 0) ? height : width;
}
@Override
public String toString() {
return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
":" + yuvStrides[2];
}
}
// Helper native function to do a video frame plane copying.
public static native void nativeCopyPlane(ByteBuffer src, int width,
int height, int srcStride, ByteBuffer dst, int dstStride);
/** The real meat of VideoSinkInterface. */
public static interface Callbacks {
// |frame| might have pending rotation and implementation of Callbacks
// should handle that by applying rotation during rendering. The callee
// is responsible for signaling when it is done with |frame| by calling
// renderFrameDone(frame).
public void renderFrame(I420Frame frame);
}
/**
* This must be called after every renderFrame() to release the frame.
*/
public static void renderFrameDone(I420Frame frame) {
frame.yuvPlanes = null;
frame.textureId = 0;
if (frame.nativeFramePointer != 0) {
releaseNativeFrame(frame.nativeFramePointer);
frame.nativeFramePointer = 0;
}
}
long nativeVideoRenderer;
public VideoRenderer(Callbacks callbacks) {
nativeVideoRenderer = nativeWrapVideoRenderer(callbacks);
}
public void dispose() {
if (nativeVideoRenderer == 0) {
// Already disposed.
return;
}
freeWrappedVideoRenderer(nativeVideoRenderer);
nativeVideoRenderer = 0;
}
private static native long nativeWrapVideoRenderer(Callbacks callbacks);
private static native void freeWrappedVideoRenderer(long nativeVideoRenderer);
private static native void releaseNativeFrame(long nativeFramePointer);
}
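
A minimal sketch (not from this CL) of a custom VideoRenderer.Callbacks implementation. It only logs the frame geometry and releases the frame; a real implementation would upload the YUV planes or the OES texture to the GPU before calling renderFrameDone(). The class name is hypothetical.

import org.webrtc.Logging;
import org.webrtc.VideoRenderer;

public class LoggingRenderer implements VideoRenderer.Callbacks {
  @Override
  public void renderFrame(VideoRenderer.I420Frame frame) {
    Logging.d("LoggingRenderer", "frame " + frame.rotatedWidth() + "x"
        + frame.rotatedHeight() + ", rotation " + frame.rotationDegree);
    // Every renderFrame() must be balanced by renderFrameDone() to release
    // the underlying native frame.
    VideoRenderer.renderFrameDone(frame);
  }
}

Such a Callbacks object is wrapped with new VideoRenderer(callbacks) and attached to a track via VideoTrack.addRenderer(), as shown in VideoTrack.java further down.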

View File

@ -1,650 +0,0 @@
/*
* Copyright 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.ArrayList;
import java.util.concurrent.CountDownLatch;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.opengles.GL10;
import android.annotation.SuppressLint;
import android.graphics.Point;
import android.graphics.Rect;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import org.webrtc.Logging;
import org.webrtc.VideoRenderer.I420Frame;
/**
 * Efficiently renders YUV frames using the GPU for color space conversion (CSC).
 * Clients should first call setView() to pass in the GLSurfaceView, and then for
 * each video stream either create a VideoRenderer instance via createGui() or a
 * VideoRenderer.Callbacks instance via create().
* Only one instance of the class can be created.
*/
public class VideoRendererGui implements GLSurfaceView.Renderer {
// |instance|, |instance.surface|, |eglContext|, and |eglContextReady| are synchronized on
// |VideoRendererGui.class|.
private static VideoRendererGui instance = null;
private static Runnable eglContextReady = null;
private static final String TAG = "VideoRendererGui";
private GLSurfaceView surface;
private static EglBase.Context eglContext = null;
  // Indicates if GLSurfaceView.Renderer.onSurfaceCreated was called.
// If true then for every newly created yuv image renderer createTexture()
// should be called. The variable is accessed on multiple threads and
// all accesses are synchronized on yuvImageRenderers' object lock.
private boolean onSurfaceCreatedCalled;
private int screenWidth;
private int screenHeight;
// List of yuv renderers.
private final ArrayList<YuvImageRenderer> yuvImageRenderers;
// Render and draw threads.
private static Thread renderFrameThread;
private static Thread drawThread;
private VideoRendererGui(GLSurfaceView surface) {
this.surface = surface;
// Create an OpenGL ES 2.0 context.
surface.setPreserveEGLContextOnPause(true);
surface.setEGLContextClientVersion(2);
surface.setRenderer(this);
surface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
yuvImageRenderers = new ArrayList<YuvImageRenderer>();
}
/**
* Class used to display stream of YUV420 frames at particular location
* on a screen. New video frames are sent to display using renderFrame()
* call.
*/
private static class YuvImageRenderer implements VideoRenderer.Callbacks {
// |surface| is synchronized on |this|.
private GLSurfaceView surface;
private int id;
// TODO(magjed): Delete GL resources in release(). Must be synchronized with draw(). We are
// currently leaking resources to avoid a rare crash in release() where the EGLContext has
// become invalid beforehand.
private int[] yuvTextures = { 0, 0, 0 };
private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
private final RendererCommon.GlDrawer drawer;
// Resources for making a deep copy of incoming OES texture frame.
private GlTextureFrameBuffer textureCopy;
// Pending frame to render. Serves as a queue with size 1. |pendingFrame| is accessed by two
// threads - frames are received in renderFrame() and consumed in draw(). Frames are dropped in
// renderFrame() if the previous frame has not been rendered yet.
private I420Frame pendingFrame;
private final Object pendingFrameLock = new Object();
// Type of video frame used for recent frame rendering.
private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
private RendererType rendererType;
private RendererCommon.ScalingType scalingType;
private boolean mirror;
private RendererCommon.RendererEvents rendererEvents;
// Flag if renderFrame() was ever called.
boolean seenFrame;
// Total number of video frames received in renderFrame() call.
private int framesReceived;
// Number of video frames dropped by renderFrame() because previous
// frame has not been rendered yet.
private int framesDropped;
// Number of rendered video frames.
private int framesRendered;
// Time in ns when the first video frame was rendered.
private long startTimeNs = -1;
// Time in ns spent in draw() function.
private long drawTimeNs;
// Time in ns spent in draw() copying resources from |pendingFrame| - including uploading frame
// data to rendering planes.
private long copyTimeNs;
// The allowed view area in percentage of screen size.
private final Rect layoutInPercentage;
// The actual view area in pixels. It is a centered subrectangle of the rectangle defined by
// |layoutInPercentage|.
private final Rect displayLayout = new Rect();
// Cached layout transformation matrix, calculated from current layout parameters.
private float[] layoutMatrix;
// Flag if layout transformation matrix update is needed.
private boolean updateLayoutProperties;
// Layout properties update lock. Guards |updateLayoutProperties|, |screenWidth|,
// |screenHeight|, |videoWidth|, |videoHeight|, |rotationDegree|, |scalingType|, and |mirror|.
private final Object updateLayoutLock = new Object();
// Texture sampling matrix.
private float[] rotatedSamplingMatrix;
// Viewport dimensions.
private int screenWidth;
private int screenHeight;
// Video dimension.
private int videoWidth;
private int videoHeight;
    // This is the number of degrees the frame should be rotated clockwise to
    // have it rendered upright.
private int rotationDegree;
private YuvImageRenderer(
GLSurfaceView surface, int id,
int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
this.surface = surface;
this.id = id;
this.scalingType = scalingType;
this.mirror = mirror;
this.drawer = drawer;
layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
updateLayoutProperties = false;
rotationDegree = 0;
}
public synchronized void reset() {
seenFrame = false;
}
private synchronized void release() {
surface = null;
drawer.release();
synchronized (pendingFrameLock) {
if (pendingFrame != null) {
VideoRenderer.renderFrameDone(pendingFrame);
pendingFrame = null;
}
}
}
private void createTextures() {
Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
Thread.currentThread().getId());
// Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
for (int i = 0; i < 3; i++) {
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
// Generate texture and framebuffer for offscreen texture copy.
textureCopy = new GlTextureFrameBuffer(GLES20.GL_RGB);
}
private void updateLayoutMatrix() {
synchronized(updateLayoutLock) {
if (!updateLayoutProperties) {
return;
}
// Initialize to maximum allowed area. Round to integer coordinates inwards the layout
// bounding box (ceil left/top and floor right/bottom) to not break constraints.
displayLayout.set(
(screenWidth * layoutInPercentage.left + 99) / 100,
(screenHeight * layoutInPercentage.top + 99) / 100,
(screenWidth * layoutInPercentage.right) / 100,
(screenHeight * layoutInPercentage.bottom) / 100);
Logging.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
+ displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
+ " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
final float videoAspectRatio = (rotationDegree % 180 == 0)
? (float) videoWidth / videoHeight
: (float) videoHeight / videoWidth;
// Adjust display size based on |scalingType|.
final Point displaySize = RendererCommon.getDisplaySize(scalingType,
videoAspectRatio, displayLayout.width(), displayLayout.height());
displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
(displayLayout.height() - displaySize.y) / 2);
Logging.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
+ displayLayout.height());
layoutMatrix = RendererCommon.getLayoutMatrix(
mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
updateLayoutProperties = false;
Logging.d(TAG, " AdjustTextureCoords done");
}
}
private void draw() {
if (!seenFrame) {
// No frame received yet - nothing to render.
return;
}
long now = System.nanoTime();
final boolean isNewFrame;
synchronized (pendingFrameLock) {
isNewFrame = (pendingFrame != null);
if (isNewFrame && startTimeNs == -1) {
startTimeNs = now;
}
if (isNewFrame) {
rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
if (pendingFrame.yuvFrame) {
rendererType = RendererType.RENDERER_YUV;
yuvUploader.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
} else {
rendererType = RendererType.RENDERER_TEXTURE;
// External texture rendering. Make a deep copy of the external texture.
// Reallocate offscreen texture if necessary.
textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
// Bind our offscreen framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
GlUtil.checkNoGLES2Error("glBindFramebuffer");
// Copy the OES texture content. This will also normalize the sampling matrix.
drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
textureCopy.getWidth(), textureCopy.getHeight(),
0, 0, textureCopy.getWidth(), textureCopy.getHeight());
rotatedSamplingMatrix = RendererCommon.identityMatrix();
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glFinish();
}
copyTimeNs += (System.nanoTime() - now);
VideoRenderer.renderFrameDone(pendingFrame);
pendingFrame = null;
}
}
updateLayoutMatrix();
final float[] texMatrix =
RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
// OpenGL defaults to lower left origin - flip viewport position vertically.
final int viewportY = screenHeight - displayLayout.bottom;
if (rendererType == RendererType.RENDERER_YUV) {
drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight,
displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
} else {
drawer.drawRgb(textureCopy.getTextureId(), texMatrix, videoWidth, videoHeight,
displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
}
if (isNewFrame) {
framesRendered++;
drawTimeNs += (System.nanoTime() - now);
if ((framesRendered % 300) == 0) {
logStatistics();
}
}
}
private void logStatistics() {
long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
". Frames received: " + framesReceived +
". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
" ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, "Draw time: " +
(int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
(int) (copyTimeNs / (1000 * framesReceived)) + " us");
}
}
public void setScreenSize(final int screenWidth, final int screenHeight) {
synchronized(updateLayoutLock) {
if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) {
return;
}
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " +
screenWidth + " x " + screenHeight);
this.screenWidth = screenWidth;
this.screenHeight = screenHeight;
updateLayoutProperties = true;
}
}
public void setPosition(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
final Rect layoutInPercentage =
new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
synchronized(updateLayoutLock) {
if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
&& mirror == this.mirror) {
return;
}
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
") " + width + " x " + height + ". Scaling: " + scalingType +
". Mirror: " + mirror);
this.layoutInPercentage.set(layoutInPercentage);
this.scalingType = scalingType;
this.mirror = mirror;
updateLayoutProperties = true;
}
}
private void setSize(final int videoWidth, final int videoHeight, final int rotation) {
if (videoWidth == this.videoWidth && videoHeight == this.videoHeight
&& rotation == rotationDegree) {
return;
}
if (rendererEvents != null) {
Logging.d(TAG, "ID: " + id +
". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight);
rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
}
synchronized (updateLayoutLock) {
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
videoWidth + " x " + videoHeight + " rotation " + rotation);
this.videoWidth = videoWidth;
this.videoHeight = videoHeight;
rotationDegree = rotation;
updateLayoutProperties = true;
Logging.d(TAG, " YuvImageRenderer.setSize done.");
}
}
@Override
public synchronized void renderFrame(I420Frame frame) {
if (surface == null) {
// This object has been released.
VideoRenderer.renderFrameDone(frame);
return;
}
if (renderFrameThread == null) {
renderFrameThread = Thread.currentThread();
}
if (!seenFrame && rendererEvents != null) {
Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
rendererEvents.onFirstFrameRendered();
}
framesReceived++;
synchronized (pendingFrameLock) {
// Check input frame parameters.
if (frame.yuvFrame) {
if (frame.yuvStrides[0] < frame.width ||
frame.yuvStrides[1] < frame.width / 2 ||
frame.yuvStrides[2] < frame.width / 2) {
Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
VideoRenderer.renderFrameDone(frame);
return;
}
}
if (pendingFrame != null) {
// Skip rendering of this frame if previous frame was not rendered yet.
framesDropped++;
VideoRenderer.renderFrameDone(frame);
seenFrame = true;
return;
}
pendingFrame = frame;
}
setSize(frame.width, frame.height, frame.rotationDegree);
seenFrame = true;
// Request rendering.
surface.requestRender();
}
}
/** Passes GLSurfaceView to video renderer. */
public static synchronized void setView(GLSurfaceView surface,
Runnable eglContextReadyCallback) {
Logging.d(TAG, "VideoRendererGui.setView");
instance = new VideoRendererGui(surface);
eglContextReady = eglContextReadyCallback;
}
public static synchronized EglBase.Context getEglBaseContext() {
return eglContext;
}
/** Releases GLSurfaceView video renderer. */
public static synchronized void dispose() {
    if (instance == null) {
return;
}
Logging.d(TAG, "VideoRendererGui.dispose");
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
yuvImageRenderer.release();
}
instance.yuvImageRenderers.clear();
}
renderFrameThread = null;
drawThread = null;
instance.surface = null;
eglContext = null;
eglContextReady = null;
instance = null;
}
/**
* Creates VideoRenderer with top left corner at (x, y) and resolution
* (width, height). All parameters are in percentage of screen resolution.
*/
public static VideoRenderer createGui(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
YuvImageRenderer javaGuiRenderer = create(
x, y, width, height, scalingType, mirror);
return new VideoRenderer(javaGuiRenderer);
}
public static VideoRenderer.Callbacks createGuiRenderer(
int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
return create(x, y, width, height, scalingType, mirror);
}
/**
* Creates VideoRenderer.Callbacks with top left corner at (x, y) and
* resolution (width, height). All parameters are in percentage of
* screen resolution.
*/
public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
}
/**
* Creates VideoRenderer.Callbacks with top left corner at (x, y) and resolution (width, height).
* All parameters are in percentage of screen resolution. The custom |drawer| will be used for
* drawing frames on the EGLSurface. This class is responsible for calling release() on |drawer|.
*/
public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
// Check display region parameters.
if (x < 0 || x > 100 || y < 0 || y > 100 ||
width < 0 || width > 100 || height < 0 || height > 100 ||
x + width > 100 || y + height > 100) {
throw new RuntimeException("Incorrect window parameters.");
}
if (instance == null) {
throw new RuntimeException(
"Attempt to create yuv renderer before setting GLSurfaceView");
}
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
instance.surface, instance.yuvImageRenderers.size(),
x, y, width, height, scalingType, mirror, drawer);
synchronized (instance.yuvImageRenderers) {
if (instance.onSurfaceCreatedCalled) {
// onSurfaceCreated has already been called for VideoRendererGui -
// need to create texture for new image and add image to the
// rendering list.
final CountDownLatch countDownLatch = new CountDownLatch(1);
instance.surface.queueEvent(new Runnable() {
@Override
public void run() {
yuvImageRenderer.createTextures();
yuvImageRenderer.setScreenSize(
instance.screenWidth, instance.screenHeight);
countDownLatch.countDown();
}
});
// Wait for task completion.
try {
countDownLatch.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
// Add yuv renderer to rendering list.
instance.yuvImageRenderers.add(yuvImageRenderer);
}
return yuvImageRenderer;
}
public static synchronized void update(
VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
Logging.d(TAG, "VideoRendererGui.update");
if (instance == null) {
throw new RuntimeException(
"Attempt to update yuv renderer before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
if (yuvImageRenderer == renderer) {
yuvImageRenderer.setPosition(x, y, width, height, scalingType, mirror);
}
}
}
}
public static synchronized void setRendererEvents(
VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
Logging.d(TAG, "VideoRendererGui.setRendererEvents");
if (instance == null) {
throw new RuntimeException(
"Attempt to set renderer events before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
if (yuvImageRenderer == renderer) {
yuvImageRenderer.rendererEvents = rendererEvents;
}
}
}
}
public static synchronized void remove(VideoRenderer.Callbacks renderer) {
Logging.d(TAG, "VideoRendererGui.remove");
if (instance == null) {
throw new RuntimeException(
"Attempt to remove renderer before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
final int index = instance.yuvImageRenderers.indexOf(renderer);
if (index == -1) {
Logging.w(TAG, "Couldn't remove renderer (not present in current list)");
} else {
instance.yuvImageRenderers.remove(index).release();
}
}
}
public static synchronized void reset(VideoRenderer.Callbacks renderer) {
Logging.d(TAG, "VideoRendererGui.reset");
if (instance == null) {
throw new RuntimeException(
"Attempt to reset renderer before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
if (yuvImageRenderer == renderer) {
yuvImageRenderer.reset();
}
}
}
}
private static void printStackTrace(Thread thread, String threadName) {
if (thread != null) {
StackTraceElement[] stackTraces = thread.getStackTrace();
if (stackTraces.length > 0) {
        Logging.d(TAG, threadName + " stack trace:");
for (StackTraceElement stackTrace : stackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
public static synchronized void printStackTraces() {
if (instance == null) {
return;
}
printStackTrace(renderFrameThread, "Render frame thread");
printStackTrace(drawThread, "Draw thread");
}
@SuppressLint("NewApi")
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
// Store render EGL context.
synchronized (VideoRendererGui.class) {
if (EglBase14.isEGL14Supported()) {
eglContext = new EglBase14.Context(EGL14.eglGetCurrentContext());
} else {
eglContext = new EglBase10.Context(((EGL10) EGLContext.getEGL()).eglGetCurrentContext());
}
Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
}
synchronized (yuvImageRenderers) {
// Create textures for all images.
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
yuvImageRenderer.createTextures();
}
onSurfaceCreatedCalled = true;
}
GlUtil.checkNoGLES2Error("onSurfaceCreated done");
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
GLES20.glClearColor(0.15f, 0.15f, 0.15f, 1.0f);
// Fire EGL context ready event.
synchronized (VideoRendererGui.class) {
if (eglContextReady != null) {
eglContextReady.run();
}
}
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
width + " x " + height + " ");
screenWidth = width;
screenHeight = height;
synchronized (yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
}
}
}
@Override
public void onDrawFrame(GL10 unused) {
if (drawThread == null) {
drawThread = Thread.currentThread();
}
GLES20.glViewport(0, 0, screenWidth, screenHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
synchronized (yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
yuvImageRenderer.draw();
}
}
}
}
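
A minimal usage sketch of the VideoRendererGui API above. The names glView and remoteTrack, and the SCALE_ASPECT_FIT scaling mode from RendererCommon, are assumptions about the caller's code.

// Pass the GLSurfaceView first; the callback fires once the EGL context is ready.
VideoRendererGui.setView(glView, new Runnable() {
  @Override
  public void run() {
    // The shared context can now be fetched via VideoRendererGui.getEglBaseContext(),
    // e.g. for wiring up a hardware video decoder.
  }
});
try {
  // Full-screen renderer: top-left corner (0, 0), 100% x 100% of the view, no mirroring.
  VideoRenderer renderer = VideoRendererGui.createGui(
      0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FIT, false /* mirror */);
  remoteTrack.addRenderer(renderer);
} catch (Exception e) {
  throw new RuntimeException("Failed to create GUI renderer", e);
}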

View File

@ -1,46 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
 * Java version of VideoSourceInterface, extended with stop/restart
 * functionality to allow explicit control of the camera device on Android,
 * where multiple open capture devices are not supported and the cost of
 * holding a camera open (even when MediaStreamTrack.setEnabled(false) mutes
 * its output to the encoder) can be too high to bear.
*/
public class VideoSource extends MediaSource {
public VideoSource(long nativeSource) {
super(nativeSource);
}
// Stop capture feeding this source.
public void stop() {
stop(nativeSource);
}
// Restart capture feeding this source. stop() must have been called since
// the last call to restart() (if any). Note that this isn't "start()";
// sources are started by default at birth.
public void restart() {
restart(nativeSource);
}
@Override
public void dispose() {
super.dispose();
}
private static native void stop(long nativeSource);
private static native void restart(long nativeSource);
}
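
A short sketch of the stop/restart pattern described in the class comment, tied to Activity lifecycle callbacks. videoSource is assumed to have been created elsewhere (e.g. via a PeerConnectionFactory).

@Override
protected void onPause() {
  super.onPause();
  // Release the camera while the app is backgrounded.
  videoSource.stop();
}

@Override
protected void onResume() {
  super.onResume();
  // restart() must only be called after a preceding stop().
  videoSource.restart();
}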

View File

@ -1,51 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.LinkedList;
/** Java version of VideoTrackInterface. */
public class VideoTrack extends MediaStreamTrack {
private final LinkedList<VideoRenderer> renderers =
new LinkedList<VideoRenderer>();
public VideoTrack(long nativeTrack) {
super(nativeTrack);
}
public void addRenderer(VideoRenderer renderer) {
renderers.add(renderer);
nativeAddRenderer(nativeTrack, renderer.nativeVideoRenderer);
}
public void removeRenderer(VideoRenderer renderer) {
if (!renderers.remove(renderer)) {
return;
}
nativeRemoveRenderer(nativeTrack, renderer.nativeVideoRenderer);
renderer.dispose();
}
public void dispose() {
while (!renderers.isEmpty()) {
removeRenderer(renderers.getFirst());
}
super.dispose();
}
private static native void free(long nativeTrack);
private static native void nativeAddRenderer(
long nativeTrack, long nativeRenderer);
private static native void nativeRemoveRenderer(
long nativeTrack, long nativeRenderer);
}
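
A short sketch of attaching and detaching a renderer on a VideoTrack, reusing the hypothetical LoggingRenderer callbacks sketch from VideoRenderer.java above. videoTrack is assumed to exist in the caller's code.

VideoRenderer renderer = new VideoRenderer(new LoggingRenderer());
videoTrack.addRenderer(renderer);
// ... later, when rendering is no longer needed. Note that removeRenderer()
// also disposes the wrapped VideoRenderer.
videoTrack.removeRenderer(renderer);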