Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
Author: Mirko Bonadei
Date: 2017-09-15 06:15:48 +02:00
Committed by: Commit Bot
Parent: 6674846b4a
Commit: bb547203bf
4576 changed files with 1092 additions and 1196 deletions

View File

@@ -0,0 +1,21 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
* more {@code AudioTrack} objects.
*/
public class AudioSource extends MediaSource {
public AudioSource(long nativeSource) {
super(nativeSource);
}
}

View File

@@ -0,0 +1,27 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ AudioTrackInterface */
public class AudioTrack extends MediaStreamTrack {
public AudioTrack(long nativeTrack) {
super(nativeTrack);
}
/** Sets the volume for the underlying MediaSource. Volume is a gain value in the range
* 0 to 10.
*/
public void setVolume(double volume) {
nativeSetVolume(super.nativeTrack, volume);
}
private static native void nativeSetVolume(long nativeTrack, double volume);
}
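
Together with AudioSource above, an AudioTrack is normally obtained from a PeerConnectionFactory rather than constructed directly. A minimal sketch, assuming an already-initialized factory from this SDK; the class name and the "audio0" track id are illustrative:

import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.MediaConstraints;
import org.webrtc.PeerConnectionFactory;

class AudioTrackExample {
  // Assumption: |factory| is an already-initialized PeerConnectionFactory.
  static AudioTrack createTrackAtFullVolume(PeerConnectionFactory factory) {
    AudioSource source = factory.createAudioSource(new MediaConstraints());
    AudioTrack track = factory.createAudioTrack("audio0", source);
    track.setVolume(10.0); // Maximum gain; the documented range is 0 to 10.
    return track;
  }
}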

View File

@@ -0,0 +1,39 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class CallSessionFileRotatingLogSink {
static {
System.loadLibrary("jingle_peerconnection_so");
}
private long nativeSink;
public static byte[] getLogData(String dirPath) {
return nativeGetLogData(dirPath);
}
public CallSessionFileRotatingLogSink(
String dirPath, int maxFileSize, Logging.Severity severity) {
nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
}
public void dispose() {
if (nativeSink != 0) {
nativeDeleteSink(nativeSink);
nativeSink = 0;
}
}
private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
private static native void nativeDeleteSink(long nativeSink);
private static native byte[] nativeGetLogData(String dirPath);
}
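
A hedged usage sketch for the sink: the directory path and the 10 MB size cap are illustrative, and the directory must be writable by the app. Logging.Severity is defined in the Logging class of this same package.

import org.webrtc.CallSessionFileRotatingLogSink;
import org.webrtc.Logging;

class LogSinkExample {
  // Assumption: |logDir| points at an app-writable directory.
  static byte[] captureCallLogs(String logDir) {
    CallSessionFileRotatingLogSink sink = new CallSessionFileRotatingLogSink(
        logDir, 10 * 1024 * 1024 /* maxFileSize */, Logging.Severity.LS_INFO);
    // ... run the call ...
    sink.dispose(); // Frees the native sink and stops writing.
    return CallSessionFileRotatingLogSink.getLogData(logDir);
  }
}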

View File

@@ -0,0 +1,35 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.media.MediaRecorder;
public class Camera1Capturer extends CameraCapturer {
private final boolean captureToTexture;
public Camera1Capturer(
String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
this.captureToTexture = captureToTexture;
}
@Override
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraName,
int width, int height, int framerate) {
Camera1Session.create(createSessionCallback, events,
captureToTexture || (mediaRecorder != null), applicationContext, surfaceTextureHelper,
mediaRecorder, Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
}
}

View File

@@ -0,0 +1,184 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.os.SystemClock;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@SuppressWarnings("deprecation")
public class Camera1Enumerator implements CameraEnumerator {
private final static String TAG = "Camera1Enumerator";
// Each entry contains the supported formats for corresponding camera index. The formats for all
// cameras are enumerated on the first call to getSupportedFormats(), and cached for future
// reference.
private static List<List<CaptureFormat>> cachedSupportedFormats;
private final boolean captureToTexture;
public Camera1Enumerator() {
this(true /* captureToTexture */);
}
public Camera1Enumerator(boolean captureToTexture) {
this.captureToTexture = captureToTexture;
}
// Returns device names that can be used to create a new CameraVideoCapturer.
@Override
public String[] getDeviceNames() {
ArrayList<String> namesList = new ArrayList<>();
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
String name = getDeviceName(i);
if (name != null) {
namesList.add(name);
Logging.d(TAG, "Index: " + i + ". " + name);
} else {
Logging.e(TAG, "Index: " + i + ". Failed to query camera name.");
}
}
String[] namesArray = new String[namesList.size()];
return namesList.toArray(namesArray);
}
@Override
public boolean isFrontFacing(String deviceName) {
android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
}
@Override
public boolean isBackFacing(String deviceName) {
android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
}
@Override
public List<CaptureFormat> getSupportedFormats(String deviceName) {
return getSupportedFormats(getCameraIndex(deviceName));
}
@Override
public CameraVideoCapturer createCapturer(
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
}
private static android.hardware.Camera.CameraInfo getCameraInfo(int index) {
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
try {
android.hardware.Camera.getCameraInfo(index, info);
} catch (Exception e) {
Logging.e(TAG, "getCameraInfo failed on index " + index, e);
return null;
}
return info;
}
static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
if (cachedSupportedFormats == null) {
cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
cachedSupportedFormats.add(enumerateFormats(i));
}
}
return cachedSupportedFormats.get(cameraId);
}
private static List<CaptureFormat> enumerateFormats(int cameraId) {
Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
final long startTimeMs = SystemClock.elapsedRealtime();
final android.hardware.Camera.Parameters parameters;
android.hardware.Camera camera = null;
try {
Logging.d(TAG, "Opening camera with index " + cameraId);
camera = android.hardware.Camera.open(cameraId);
parameters = camera.getParameters();
} catch (RuntimeException e) {
Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
return new ArrayList<CaptureFormat>();
} finally {
if (camera != null) {
camera.release();
}
}
final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
try {
int minFps = 0;
int maxFps = 0;
final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
if (listFpsRange != null) {
// getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
// corresponding to the highest fps.
final int[] range = listFpsRange.get(listFpsRange.size() - 1);
minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
}
for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
}
} catch (Exception e) {
Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
}
final long endTimeMs = SystemClock.elapsedRealtime();
Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
return formatList;
}
// Convert from android.hardware.Camera.Size to Size.
static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
final List<Size> sizes = new ArrayList<Size>();
for (android.hardware.Camera.Size size : cameraSizes) {
sizes.add(new Size(size.width, size.height));
}
return sizes;
}
// Convert from int[2] to CaptureFormat.FramerateRange.
static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
for (int[] range : arrayRanges) {
ranges.add(new CaptureFormat.FramerateRange(
range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
}
return ranges;
}
// Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException
// if no such camera can be found.
static int getCameraIndex(String deviceName) {
Logging.d(TAG, "getCameraIndex: " + deviceName);
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
if (deviceName.equals(getDeviceName(i))) {
return i;
}
}
throw new IllegalArgumentException("No such camera: " + deviceName);
}
// Returns the name of the camera with camera index. Returns null if the
// camera can not be used.
static String getDeviceName(int index) {
android.hardware.Camera.CameraInfo info = getCameraInfo(index);
if (info == null) {
return null;
}
String facing =
(info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
}
}
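
A sketch of typical enumeration with this class: walk the device names, pick the first front-facing camera, and create a capturer from it. The class name is illustrative, and null is passed for the events handler only for brevity.

import org.webrtc.Camera1Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.CameraVideoCapturer;

class Camera1Example {
  // Returns null if no front-facing camera is present.
  static CameraVideoCapturer createFrontFacingCapturer() {
    CameraEnumerator enumerator = new Camera1Enumerator(true /* captureToTexture */);
    for (String deviceName : enumerator.getDeviceNames()) {
      if (enumerator.isFrontFacing(deviceName)) {
        return enumerator.createCapturer(deviceName, null /* eventsHandler */);
      }
    }
    return null;
  }
}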

View File

@@ -0,0 +1,38 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import android.media.MediaRecorder;
@TargetApi(21)
public class Camera2Capturer extends CameraCapturer {
private final Context context;
private final CameraManager cameraManager;
public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
super(cameraName, eventsHandler, new Camera2Enumerator(context));
this.context = context;
cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
}
@Override
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraName,
int width, int height, int framerate) {
Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
surfaceTextureHelper, mediaRecorder, cameraName, width, height, framerate);
}
}

View File

@@ -0,0 +1,248 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Build;
import android.os.SystemClock;
import android.util.AndroidException;
import android.util.Range;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@TargetApi(21)
public class Camera2Enumerator implements CameraEnumerator {
private final static String TAG = "Camera2Enumerator";
private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
// Each entry contains the supported formats for a given camera index. The formats are enumerated
// lazily in getSupportedFormats(), and cached for future reference.
private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
new HashMap<String, List<CaptureFormat>>();
final Context context;
final CameraManager cameraManager;
public Camera2Enumerator(Context context) {
this.context = context;
this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
}
@Override
public String[] getDeviceNames() {
try {
return cameraManager.getCameraIdList();
// On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
} catch (/* CameraAccessException */ AndroidException e) {
Logging.e(TAG, "Camera access exception: " + e);
return new String[] {};
}
}
@Override
public boolean isFrontFacing(String deviceName) {
CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
return characteristics != null
&& characteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_FRONT;
}
@Override
public boolean isBackFacing(String deviceName) {
CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
return characteristics != null
&& characteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_BACK;
}
@Override
public List<CaptureFormat> getSupportedFormats(String deviceName) {
return getSupportedFormats(context, deviceName);
}
@Override
public CameraVideoCapturer createCapturer(
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new Camera2Capturer(context, deviceName, eventsHandler);
}
private CameraCharacteristics getCameraCharacteristics(String deviceName) {
try {
return cameraManager.getCameraCharacteristics(deviceName);
// On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
} catch (/* CameraAccessException */ AndroidException e) {
Logging.e(TAG, "Camera access exception: " + e);
return null;
}
}
/**
* Checks if API is supported and all cameras have better than legacy support.
*/
public static boolean isSupported(Context context) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
return false;
}
CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
String[] cameraIds = cameraManager.getCameraIdList();
for (String id : cameraIds) {
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
== CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return false;
}
}
// On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
} catch (/* CameraAccessException */ AndroidException e) {
Logging.e(TAG, "Camera access exception: " + e);
return false;
}
return true;
}
static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
if (fpsRanges.length == 0) {
return 1000;
}
return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
}
static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
final StreamConfigurationMap streamMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
final int supportLevel =
cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
final List<Size> sizes = convertSizes(nativeSizes);
// Video may be stretched pre LMR1 on legacy implementations.
// Filter out formats that have different aspect ratio than the sensor array.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
&& supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
final Rect activeArraySize =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
final ArrayList<Size> filteredSizes = new ArrayList<Size>();
for (Size size : sizes) {
if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
filteredSizes.add(size);
}
}
return filteredSizes;
} else {
return sizes;
}
}
static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
return getSupportedFormats(
(CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
}
static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
synchronized (cachedSupportedFormats) {
if (cachedSupportedFormats.containsKey(cameraId)) {
return cachedSupportedFormats.get(cameraId);
}
Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
final long startTimeMs = SystemClock.elapsedRealtime();
final CameraCharacteristics cameraCharacteristics;
try {
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
} catch (Exception ex) {
Logging.e(TAG, "getCameraCharacteristics(): " + ex);
return new ArrayList<CaptureFormat>();
}
final StreamConfigurationMap streamMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Range<Integer>[] fpsRanges =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
List<CaptureFormat.FramerateRange> framerateRanges =
convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
List<Size> sizes = getSupportedSizes(cameraCharacteristics);
int defaultMaxFps = 0;
for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
}
final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
for (Size size : sizes) {
long minFrameDurationNs = 0;
try {
minFrameDurationNs = streamMap.getOutputMinFrameDuration(
SurfaceTexture.class, new android.util.Size(size.width, size.height));
} catch (Exception e) {
// getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
}
final int maxFps = (minFrameDurationNs == 0)
? defaultMaxFps
: (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
}
cachedSupportedFormats.put(cameraId, formatList);
final long endTimeMs = SystemClock.elapsedRealtime();
Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
return formatList;
}
}
// Convert from android.util.Size to Size.
private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
final List<Size> sizes = new ArrayList<Size>();
for (android.util.Size size : cameraSizes) {
sizes.add(new Size(size.getWidth(), size.getHeight()));
}
return sizes;
}
// Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
static List<CaptureFormat.FramerateRange> convertFramerates(
Range<Integer>[] arrayRanges, int unitFactor) {
final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
for (Range<Integer> range : arrayRanges) {
ranges.add(new CaptureFormat.FramerateRange(
range.getLower() * unitFactor, range.getUpper() * unitFactor));
}
return ranges;
}
}
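
Since isSupported() above rejects devices where any camera only has legacy-level support, a common pattern is to prefer Camera2 and fall back to Camera1. A minimal sketch with an illustrative class name:

import android.content.Context;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;

class EnumeratorChoiceExample {
  // Falls back to Camera1 when the Camera2 API is unavailable or too weak.
  static CameraEnumerator chooseEnumerator(Context context) {
    if (Camera2Enumerator.isSupported(context)) {
      return new Camera2Enumerator(context);
    }
    return new Camera1Enumerator(true /* captureToTexture */);
  }
}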

View File

@@ -0,0 +1,206 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static java.lang.Math.abs;
import android.graphics.ImageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
@SuppressWarnings("deprecation")
public class CameraEnumerationAndroid {
private final static String TAG = "CameraEnumerationAndroid";
static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
// 0, Unknown resolution
new Size(160, 120), // 1, QQVGA
new Size(240, 160), // 2, HQVGA
new Size(320, 240), // 3, QVGA
new Size(400, 240), // 4, WQVGA
new Size(480, 320), // 5, HVGA
new Size(640, 360), // 6, nHD
new Size(640, 480), // 7, VGA
new Size(768, 480), // 8, WVGA
new Size(854, 480), // 9, FWVGA
new Size(800, 600), // 10, SVGA
new Size(960, 540), // 11, qHD
new Size(960, 640), // 12, DVGA
new Size(1024, 576), // 13, WSVGA
new Size(1024, 600), // 14, WVSGA
new Size(1280, 720), // 15, HD
new Size(1280, 1024), // 16, SXGA
new Size(1920, 1080), // 17, Full HD
new Size(1920, 1440), // 18, Full HD 4:3
new Size(2560, 1440), // 19, QHD
new Size(3840, 2160) // 20, UHD
));
public static class CaptureFormat {
// Class to represent a framerate range. The framerate varies because of lighting conditions.
// The values are multiplied by 1000, so 1000 represents one frame per second.
public static class FramerateRange {
public int min;
public int max;
public FramerateRange(int min, int max) {
this.min = min;
this.max = max;
}
@Override
public String toString() {
return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
}
@Override
public boolean equals(Object other) {
if (!(other instanceof FramerateRange)) {
return false;
}
final FramerateRange otherFramerate = (FramerateRange) other;
return min == otherFramerate.min && max == otherFramerate.max;
}
@Override
public int hashCode() {
// Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
return 1 + 65537 * min + max;
}
}
public final int width;
public final int height;
public final FramerateRange framerate;
// TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
// needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
// all imageFormats.
public final int imageFormat = ImageFormat.NV21;
public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
this.width = width;
this.height = height;
this.framerate = new FramerateRange(minFramerate, maxFramerate);
}
public CaptureFormat(int width, int height, FramerateRange framerate) {
this.width = width;
this.height = height;
this.framerate = framerate;
}
// Calculates the frame size of this capture format.
public int frameSize() {
return frameSize(width, height, imageFormat);
}
// Calculates the frame size of the specified image format. Currently only
// supporting ImageFormat.NV21.
// The size is width * height * number of bytes per pixel.
// http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
public static int frameSize(int width, int height, int imageFormat) {
if (imageFormat != ImageFormat.NV21) {
throw new UnsupportedOperationException("Don't know how to calculate "
+ "the frame size of non-NV21 image formats.");
}
return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
@Override
public String toString() {
return width + "x" + height + "@" + framerate;
}
@Override
public boolean equals(Object other) {
if (!(other instanceof CaptureFormat)) {
return false;
}
final CaptureFormat otherFormat = (CaptureFormat) other;
return width == otherFormat.width && height == otherFormat.height
&& framerate.equals(otherFormat.framerate);
}
@Override
public int hashCode() {
return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
}
}
// Helper class for finding the closest supported format for the two functions below. It creates a
// comparator based on the difference to some requested parameters, where the element with the
// minimum difference is the element that is closest to the requested parameters.
private static abstract class ClosestComparator<T> implements Comparator<T> {
// Difference between supported and requested parameter.
abstract int diff(T supportedParameter);
@Override
public int compare(T t1, T t2) {
return diff(t1) - diff(t2);
}
}
// Prefer an fps range with an upper bound close to |framerate|. Also prefer an fps range with a
// low lower bound, to allow the framerate to fluctuate based on lighting conditions.
public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
return Collections.min(
supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
// Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
// from requested.
private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
// Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
private static final int MIN_FPS_THRESHOLD = 8000;
private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
// Use one weight for small |value| less than |threshold|, and another weight above.
private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
return (value < threshold) ? value * lowWeight
: threshold * lowWeight + (value - threshold) * highWeight;
}
@Override
int diff(CaptureFormat.FramerateRange range) {
final int minFpsError = progressivePenalty(
range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
return minFpsError + maxFpsError;
}
});
}
public static Size getClosestSupportedSize(
List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
return Collections.min(supportedSizes, new ClosestComparator<Size>() {
@Override
int diff(Size size) {
return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
}
});
}
// Helper method for camera classes.
static void reportCameraResolution(Histogram histogram, Size resolution) {
int index = COMMON_RESOLUTIONS.indexOf(resolution);
// 0 is reserved for unknown resolution, so add 1.
// indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
histogram.addSample(index + 1);
}
}
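
A worked sketch of the two selection helpers above, with illustrative candidate values. Note the units: framerate ranges carry fps * 1000, while the requested framerate is passed in plain fps.

import java.util.Arrays;
import java.util.List;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.Size;

class ClosestFormatExample {
  static void demo() {
    // Candidate ranges in fps * 1000 units, as Camera1 might report them.
    List<CaptureFormat.FramerateRange> ranges = Arrays.asList(
        new CaptureFormat.FramerateRange(15000, 15000),
        new CaptureFormat.FramerateRange(7000, 30000),
        new CaptureFormat.FramerateRange(30000, 30000));
    // Picks [7.0:30.0]: its upper bound matches the requested 30 fps and its
    // low lower bound incurs little penalty, allowing the rate to drop in
    // dim light.
    CaptureFormat.FramerateRange best =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(ranges, 30 /* requestedFps */);
    List<Size> sizes = Arrays.asList(new Size(640, 480), new Size(1280, 720));
    // Minimizes |width diff| + |height diff|; for 960x540 that is 640x480
    // (320 + 60 = 380, versus 320 + 180 = 500 for 1280x720).
    Size closest = CameraEnumerationAndroid.getClosestSupportedSize(sizes, 960, 540);
  }
}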

View File

@@ -0,0 +1,25 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import java.util.List;
public interface CameraEnumerator {
public String[] getDeviceNames();
public boolean isFrontFacing(String deviceName);
public boolean isBackFacing(String deviceName);
public List<CaptureFormat> getSupportedFormats(String deviceName);
public CameraVideoCapturer createCapturer(
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
}

View File

@@ -0,0 +1,158 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.media.MediaRecorder;
/**
* Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
* switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
* class for detecting camera freezes.
*/
public interface CameraVideoCapturer extends VideoCapturer {
/**
* Camera events handler - can be used to be notified about camera events. The callbacks are
* executed from an arbitrary thread.
*/
public interface CameraEventsHandler {
// Camera error handler - invoked when camera can not be opened
// or any camera exception happens on camera thread.
void onCameraError(String errorDescription);
// Called when camera is disconnected.
void onCameraDisconnected();
// Invoked when camera stops receiving frames.
void onCameraFreezed(String errorDescription);
// Callback invoked when camera is opening.
void onCameraOpening(String cameraName);
// Callback invoked when first camera frame is available after camera is started.
void onFirstFrameAvailable();
// Callback invoked when camera is closed.
void onCameraClosed();
}
/**
* Camera switch handler - one of these functions is invoked with the result of switchCamera().
* The callback may be called on an arbitrary thread.
*/
public interface CameraSwitchHandler {
// Invoked on success. |isFrontCamera| is true if the new camera is front facing.
void onCameraSwitchDone(boolean isFrontCamera);
// Invoked on failure, e.g. camera is stopped or only one camera available.
void onCameraSwitchError(String errorDescription);
}
/**
* Switch camera to the next valid camera id. This can only be called while the camera is running.
* This function can be called from any thread.
*/
void switchCamera(CameraSwitchHandler switchEventsHandler);
/**
* MediaRecorder add/remove handler - one of these functions is invoked with the result of
* an addMediaRecorderToCamera() or removeMediaRecorderFromCamera() call.
* The callback may be called on an arbitrary thread.
*/
public interface MediaRecorderHandler {
// Invoked on success.
void onMediaRecorderSuccess();
// Invoked on failure, e.g. camera is stopped or any exception happens.
void onMediaRecorderError(String errorDescription);
}
/**
* Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
* Once a MediaRecorder is added to the camera pipeline, switching cameras is not allowed.
* This function can be called from any thread.
*/
void addMediaRecorderToCamera(MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler);
/**
* Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
* This function can be called from any thread.
*/
void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler);
/**
* Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
* on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
* thread.
*/
public static class CameraStatistics {
private final static String TAG = "CameraStatistics";
private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
private final static int CAMERA_FREEZE_REPORT_TIMEOUT_MS = 4000;
private final SurfaceTextureHelper surfaceTextureHelper;
private final CameraEventsHandler eventsHandler;
private int frameCount;
private int freezePeriodCount;
// Camera observer - monitors camera framerate. Observer is executed on camera thread.
private final Runnable cameraObserver = new Runnable() {
@Override
public void run() {
final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
Logging.d(TAG, "Camera fps: " + cameraFps + ".");
if (frameCount == 0) {
++freezePeriodCount;
if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMEOUT_MS
&& eventsHandler != null) {
Logging.e(TAG, "Camera freezed.");
if (surfaceTextureHelper.isTextureInUse()) {
// This can only happen if we are capturing to textures.
eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
} else {
eventsHandler.onCameraFreezed("Camera failure.");
}
return;
}
} else {
freezePeriodCount = 0;
}
frameCount = 0;
surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
}
};
public CameraStatistics(
SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
if (surfaceTextureHelper == null) {
throw new IllegalArgumentException("SurfaceTextureHelper is null");
}
this.surfaceTextureHelper = surfaceTextureHelper;
this.eventsHandler = eventsHandler;
this.frameCount = 0;
this.freezePeriodCount = 0;
surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
}
private void checkThread() {
if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
public void addFrame() {
checkThread();
++frameCount;
}
public void release() {
surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
}
}
}
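
A hedged sketch of switchCamera(); per the interface comments above, the handler may run on an arbitrary thread, so anything that touches UI state should be posted back to the main thread. The class name is illustrative and the capturer is assumed to be running.

import org.webrtc.CameraVideoCapturer;

class SwitchCameraExample {
  static void flipCamera(CameraVideoCapturer capturer) {
    capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
      @Override
      public void onCameraSwitchDone(boolean isFrontCamera) {
        // Runs on an arbitrary thread; post to the UI thread before
        // updating any views.
      }
      @Override
      public void onCameraSwitchError(String errorDescription) {
        // E.g. the capturer is stopped or only one camera is available.
      }
    });
  }
}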

View File

@@ -0,0 +1,127 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/** Java wrapper for a C++ DataChannelInterface. */
public class DataChannel {
/** Java wrapper for WebIDL RTCDataChannel. */
public static class Init {
public boolean ordered = true;
// Optional unsigned short in WebIDL, -1 means unspecified.
public int maxRetransmitTimeMs = -1;
// Optional unsigned short in WebIDL, -1 means unspecified.
public int maxRetransmits = -1;
public String protocol = "";
public boolean negotiated = false;
// Optional unsigned short in WebIDL, -1 means unspecified.
public int id = -1;
public Init() {}
// Called only by native code.
private Init(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits, String protocol,
boolean negotiated, int id) {
this.ordered = ordered;
this.maxRetransmitTimeMs = maxRetransmitTimeMs;
this.maxRetransmits = maxRetransmits;
this.protocol = protocol;
this.negotiated = negotiated;
this.id = id;
}
}
/** Java version of C++ DataBuffer. The atom of data in a DataChannel. */
public static class Buffer {
/** The underlying data. */
public final ByteBuffer data;
/**
* Indicates whether |data| contains UTF-8 text or "binary data"
* (i.e. anything else).
*/
public final boolean binary;
public Buffer(ByteBuffer data, boolean binary) {
this.data = data;
this.binary = binary;
}
}
/** Java version of C++ DataChannelObserver. */
public interface Observer {
/** The data channel's bufferedAmount has changed. */
public void onBufferedAmountChange(long previousAmount);
/** The data channel state has changed. */
public void onStateChange();
/**
* A data buffer was successfully received. NOTE: |buffer.data| will be
* freed once this function returns so callers who want to use the data
* asynchronously must make sure to copy it first.
*/
public void onMessage(Buffer buffer);
}
/** Keep in sync with DataChannelInterface::DataState. */
public enum State { CONNECTING, OPEN, CLOSING, CLOSED }
private final long nativeDataChannel;
private long nativeObserver;
public DataChannel(long nativeDataChannel) {
this.nativeDataChannel = nativeDataChannel;
}
/** Register |observer|, replacing any previously-registered observer. */
public void registerObserver(Observer observer) {
if (nativeObserver != 0) {
unregisterObserverNative(nativeObserver);
}
nativeObserver = registerObserverNative(observer);
}
private native long registerObserverNative(Observer observer);
/** Unregister the (only) observer. */
public void unregisterObserver() {
unregisterObserverNative(nativeObserver);
}
private native void unregisterObserverNative(long nativeObserver);
public native String label();
public native int id();
public native State state();
/**
* Return the number of bytes of application data (UTF-8 text and binary data)
* that have been queued using SendBuffer but have not yet been transmitted
* to the network.
*/
public native long bufferedAmount();
/** Close the channel. */
public native void close();
/** Send |data| to the remote peer; return success. */
public boolean send(Buffer buffer) {
// TODO(fischman): this could be cleverer about avoiding copies if the
// ByteBuffer is direct and/or is backed by an array.
byte[] data = new byte[buffer.data.remaining()];
buffer.data.get(data);
return sendNative(data, buffer.binary);
}
private native boolean sendNative(byte[] data, boolean binary);
/** Dispose of native resources attached to this channel. */
public native void dispose();
};
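
A hedged sketch of sending UTF-8 text over a channel. The channel itself would typically come from PeerConnection.createDataChannel(), which is outside this file; the class name is illustrative.

import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import org.webrtc.DataChannel;

class DataChannelExample {
  // Assumption: |channel| was created elsewhere, e.g. via
  // PeerConnection.createDataChannel("chat", new DataChannel.Init()).
  static boolean sendText(DataChannel channel, String text) {
    ByteBuffer data = ByteBuffer.wrap(text.getBytes(Charset.forName("UTF-8")));
    // binary = false marks the payload as UTF-8 text.
    return channel.send(new DataChannel.Buffer(data, false /* binary */));
  }
}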

View File

@@ -0,0 +1,83 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ DtmfSenderInterface. */
public class DtmfSender {
final long nativeDtmfSender;
public DtmfSender(long nativeDtmfSender) {
this.nativeDtmfSender = nativeDtmfSender;
}
/**
* @return true if this DtmfSender is capable of sending DTMF. Otherwise false.
*/
public boolean canInsertDtmf() {
return nativeCanInsertDtmf(nativeDtmfSender);
}
/**
* Queues a task that sends the provided DTMF tones.
* <p>
* If insertDtmf is called on the same object while an existing task for this
* object to generate DTMF is still running, the previous task is canceled.
*
* @param tones This parameter is treated as a series of characters. The characters 0
* through 9, A through D, #, and * generate the associated DTMF tones. The
* characters a to d are equivalent to A to D. The character ',' indicates a
* delay of 2 seconds before processing the next character in the tones
* parameter. Unrecognized characters are ignored.
* @param duration Indicates the duration in ms to use for each character passed in the tones
* parameter. The duration cannot be more than 6000 or less than 70.
* @param interToneGap Indicates the gap between tones in ms. Must be at least 50 ms but should be
* as short as possible.
* @return true on success and false on failure.
*/
public boolean insertDtmf(String tones, int duration, int interToneGap) {
return nativeInsertDtmf(nativeDtmfSender, tones, duration, interToneGap);
}
/**
* @return The tones remaining to be played out
*/
public String tones() {
return nativeTones(nativeDtmfSender);
}
/**
* @return The current tone duration value in ms. This value will be the value last set via the
* insertDtmf() method, or the default value of 100 ms if insertDtmf() was never called.
*/
public int duration() {
return nativeDuration(nativeDtmfSender);
}
/**
* @return The current value of the between-tone gap in ms. This value will be the value last set
* via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
* called.
*/
public int interToneGap() {
return nativeInterToneGap(nativeDtmfSender);
}
public void dispose() {
JniCommon.nativeReleaseRef(nativeDtmfSender);
}
private static native boolean nativeCanInsertDtmf(long nativeDtmfSender);
private static native boolean nativeInsertDtmf(
long nativeDtmfSender, String tones, int duration, int interToneGap);
private static native String nativeTones(long nativeDtmfSender);
private static native int nativeDuration(long nativeDtmfSender);
private static native int nativeInterToneGap(long nativeDtmfSender);
};
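
A hedged usage sketch: a DtmfSender is typically obtained from an RtpSender (an assumption about the surrounding SDK, not shown in this file). The digits and timings are illustrative but within the documented limits.

import org.webrtc.DtmfSender;

class DtmfExample {
  // Assumption: |sender| came from an audio RtpSender.
  static boolean sendDigits(DtmfSender sender) {
    if (!sender.canInsertDtmf()) {
      return false;
    }
    // 200 ms per tone (allowed: 70-6000) and a 100 ms gap (minimum 50).
    return sender.insertDtmf("1234#", 200 /* duration */, 100 /* interToneGap */);
  }
}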

View File

@@ -0,0 +1,173 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import javax.microedition.khronos.egl.EGL10;
/**
* Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
public abstract class EglBase {
// EGL wrapper for an actual EGLContext.
public static class Context {}
// According to the documentation, EGL can be used from multiple threads at the same time if each
// thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
// Therefore, synchronize on this global lock before calling dangerous EGL functions that might
// deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
public static final Object lock = new Object();
// These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
// https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
// This is similar to what GLSurfaceView does:
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
public static final int EGL_OPENGL_ES2_BIT = 4;
// Android-specific extension.
public static final int EGL_RECORDABLE_ANDROID = 0x3142;
// clang-format off
public static final int[] CONFIG_PLAIN = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public static final int[] CONFIG_RGBA = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public static final int[] CONFIG_PIXEL_BUFFER = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
EGL10.EGL_NONE
};
public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
EGL10.EGL_NONE
};
public static final int[] CONFIG_RECORDABLE = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
EGL10.EGL_NONE
};
// clang-format on
/**
* Create a new context with the specified config attributes, sharing data with |sharedContext|.
* If |sharedContext| is null, a root context is created. This function will try to create an EGL
* 1.4 context if possible, and an EGL 1.0 context otherwise.
*/
public static EglBase create(Context sharedContext, int[] configAttributes) {
return (EglBase14.isEGL14Supported()
&& (sharedContext == null || sharedContext instanceof EglBase14.Context))
? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
: new EglBase10((EglBase10.Context) sharedContext, configAttributes);
}
/**
* Helper function for creating a plain root context. This function will try to create an EGL 1.4
* context if possible, and an EGL 1.0 context otherwise.
*/
public static EglBase create() {
return create(null /* sharedContext */, CONFIG_PLAIN);
}
/**
* Helper function for creating a plain context, sharing data with |sharedContext|. This function
* will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise.
*/
public static EglBase create(Context sharedContext) {
return create(sharedContext, CONFIG_PLAIN);
}
/**
* Explicitly create a root EGL 1.0 context with the specified config attributes.
*/
public static EglBase createEgl10(int[] configAttributes) {
return new EglBase10(null /* sharedContext */, configAttributes);
}
/**
* Explicitly create a root EGL 1.0 context with the specified config attributes
* and shared context.
*/
public static EglBase createEgl10(
javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) {
return new EglBase10(new EglBase10.Context(sharedContext), configAttributes);
}
/**
* Explicitly create a root EGL 1.4 context with the specified config attributes.
*/
public static EglBase createEgl14(int[] configAttributes) {
return new EglBase14(null /* sharedContext */, configAttributes);
}
/**
* Explicitly create a root EGL 1.4 context with the specified config attributes
* and shared context.
*/
public static EglBase createEgl14(
android.opengl.EGLContext sharedContext, int[] configAttributes) {
return new EglBase14(new EglBase14.Context(sharedContext), configAttributes);
}
public abstract void createSurface(Surface surface);
// Create EGLSurface from the Android SurfaceTexture.
public abstract void createSurface(SurfaceTexture surfaceTexture);
// Create dummy 1x1 pixel buffer surface so the context can be made current.
public abstract void createDummyPbufferSurface();
public abstract void createPbufferSurface(int width, int height);
public abstract Context getEglBaseContext();
public abstract boolean hasSurface();
public abstract int surfaceWidth();
public abstract int surfaceHeight();
public abstract void releaseSurface();
public abstract void release();
public abstract void makeCurrent();
// Detach the current EGL context, so that it can be made current on another thread.
public abstract void detachCurrent();
public abstract void swapBuffers();
public abstract void swapBuffers(long presentationTimeStampNs);
}
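
A minimal off-screen sketch using the factory methods above; the class name is illustrative.

import org.webrtc.EglBase;

class EglOffscreenExample {
  // Creates a root context, makes it current on a dummy 1x1 pbuffer
  // surface, and releases everything again.
  static void runOffscreen() {
    EglBase eglBase = EglBase.create(null /* sharedContext */, EglBase.CONFIG_PIXEL_BUFFER);
    eglBase.createDummyPbufferSurface();
    eglBase.makeCurrent();
    // ... issue GL calls here ...
    eglBase.releaseSurface();
    eglBase.release();
  }
}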

View File

@@ -0,0 +1,685 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Locale;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/**
* Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on an EGL Surface.
* This class is intended to be used as a helper class for rendering on SurfaceViews and
* TextureViews.
*/
public class EglRenderer implements VideoRenderer.Callbacks, VideoSink {
private static final String TAG = "EglRenderer";
private static final long LOG_INTERVAL_SEC = 4;
private static final int MAX_SURFACE_CLEAR_COUNT = 3;
public interface FrameListener { void onFrame(Bitmap frame); }
private static class FrameListenerAndParams {
public final FrameListener listener;
public final float scale;
public final RendererCommon.GlDrawer drawer;
public final boolean applyFpsReduction;
public FrameListenerAndParams(FrameListener listener, float scale,
RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
this.listener = listener;
this.scale = scale;
this.drawer = drawer;
this.applyFpsReduction = applyFpsReduction;
}
}
private class EglSurfaceCreation implements Runnable {
private Object surface;
public synchronized void setSurface(Object surface) {
this.surface = surface;
}
@Override
public synchronized void run() {
if (surface != null && eglBase != null && !eglBase.hasSurface()) {
if (surface instanceof Surface) {
eglBase.createSurface((Surface) surface);
} else if (surface instanceof SurfaceTexture) {
eglBase.createSurface((SurfaceTexture) surface);
} else {
throw new IllegalStateException("Invalid surface: " + surface);
}
eglBase.makeCurrent();
// Necessary for YUV frames with odd width.
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
}
}
}
private final String name;
// |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
// on |handlerLock|.
private final Object handlerLock = new Object();
private Handler renderThreadHandler;
private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();
// Variables for fps reduction.
private final Object fpsReductionLock = new Object();
// Time for when next frame should be rendered.
private long nextFrameTimeNs;
// Minimum duration between frames when fps reduction is active, or -1 if video is completely
// paused.
private long minRenderPeriodNs;
// EGL and GL resources for drawing YUV/OES textures. After initialization, these are only accessed
// from the render thread.
private EglBase eglBase;
private final VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
private RendererCommon.GlDrawer drawer;
private final Matrix drawMatrix = new Matrix();
// Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
private final Object frameLock = new Object();
private VideoFrame pendingFrame;
// These variables are synchronized on |layoutLock|.
private final Object layoutLock = new Object();
private float layoutAspectRatio;
// If true, mirrors the video stream horizontally.
private boolean mirror;
// These variables are synchronized on |statisticsLock|.
private final Object statisticsLock = new Object();
// Total number of video frames received in renderFrame() call.
private int framesReceived;
// Number of video frames dropped by renderFrame() because previous frame has not been rendered
// yet.
private int framesDropped;
// Number of rendered video frames.
private int framesRendered;
// Start time for counting these statistics, or 0 if we haven't started measuring yet.
private long statisticsStartTimeNs;
// Time in ns spent in renderFrameOnRenderThread() function.
private long renderTimeNs;
// Time in ns spent by the render thread in the swapBuffers() function.
private long renderSwapBufferTimeNs;
// Used for bitmap capturing.
private GlTextureFrameBuffer bitmapTextureFramebuffer;
private final Runnable logStatisticsRunnable = new Runnable() {
@Override
public void run() {
logStatistics();
synchronized (handlerLock) {
if (renderThreadHandler != null) {
renderThreadHandler.removeCallbacks(logStatisticsRunnable);
renderThreadHandler.postDelayed(
logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
}
}
}
};
private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();
/**
* Standard constructor. The name will be used for the render thread name and included when
* logging. In order to render something, you must first call init() and createEglSurface.
*/
public EglRenderer(String name) {
this.name = name;
}
/**
* Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
* for drawing frames on the EGLSurface. This class is responsible for calling release() on
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
* init()/release() cycle.
*/
public void init(final EglBase.Context sharedContext, final int[] configAttributes,
RendererCommon.GlDrawer drawer) {
synchronized (handlerLock) {
if (renderThreadHandler != null) {
throw new IllegalStateException(name + "Already initialized");
}
logD("Initializing EglRenderer");
this.drawer = drawer;
final HandlerThread renderThread = new HandlerThread(name + "EglRenderer");
renderThread.start();
renderThreadHandler = new Handler(renderThread.getLooper());
// Create EGL context on the newly created render thread. It should be possible to create the
// context on this thread and make it current on the render thread, but this causes failure on
// some Marvell-based JB devices. https://bugs.chromium.org/p/webrtc/issues/detail?id=6350.
ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
// If sharedContext is null, then texture frames are disabled. This is typically for old
// devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has
// caused trouble on some weird devices.
if (sharedContext == null) {
logD("EglBase10.create context");
eglBase = EglBase.createEgl10(configAttributes);
} else {
logD("EglBase.create shared context");
eglBase = EglBase.create(sharedContext, configAttributes);
}
});
renderThreadHandler.post(eglSurfaceCreationRunnable);
final long currentTimeNs = System.nanoTime();
resetStatistics(currentTimeNs);
renderThreadHandler.postDelayed(
logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
}
}
public void createEglSurface(Surface surface) {
createEglSurfaceInternal(surface);
}
public void createEglSurface(SurfaceTexture surfaceTexture) {
createEglSurfaceInternal(surfaceTexture);
}
private void createEglSurfaceInternal(Object surface) {
eglSurfaceCreationRunnable.setSurface(surface);
postToRenderThread(eglSurfaceCreationRunnable);
}
/**
* Block until any pending frame is returned and all GL resources released, even if an interrupt
* occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
* should be called before the Activity is destroyed and the EGLContext is still valid. If you
* don't call this function, the GL resources might leak.
*/
public void release() {
logD("Releasing.");
final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
synchronized (handlerLock) {
if (renderThreadHandler == null) {
logD("Already released");
return;
}
renderThreadHandler.removeCallbacks(logStatisticsRunnable);
// Release EGL and GL resources on render thread.
renderThreadHandler.postAtFrontOfQueue(() -> {
if (drawer != null) {
drawer.release();
drawer = null;
}
frameDrawer.release();
if (bitmapTextureFramebuffer != null) {
bitmapTextureFramebuffer.release();
bitmapTextureFramebuffer = null;
}
if (eglBase != null) {
logD("eglBase detach and release.");
eglBase.detachCurrent();
eglBase.release();
eglBase = null;
}
eglCleanupBarrier.countDown();
});
final Looper renderLooper = renderThreadHandler.getLooper();
// TODO(magjed): Replace this post() with renderLooper.quitSafely() when API support >= 18.
renderThreadHandler.post(() -> {
logD("Quitting render thread.");
renderLooper.quit();
});
// Don't accept any more frames or messages to the render thread.
renderThreadHandler = null;
}
// Make sure the EGL/GL cleanup posted above is executed.
ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
synchronized (frameLock) {
if (pendingFrame != null) {
pendingFrame.release();
pendingFrame = null;
}
}
logD("Releasing done.");
}
/**
* Reset the statistics logged in logStatistics().
*/
private void resetStatistics(long currentTimeNs) {
synchronized (statisticsLock) {
statisticsStartTimeNs = currentTimeNs;
framesReceived = 0;
framesDropped = 0;
framesRendered = 0;
renderTimeNs = 0;
renderSwapBufferTimeNs = 0;
}
}
public void printStackTrace() {
synchronized (handlerLock) {
final Thread renderThread =
(renderThreadHandler == null) ? null : renderThreadHandler.getLooper().getThread();
if (renderThread != null) {
final StackTraceElement[] renderStackTrace = renderThread.getStackTrace();
if (renderStackTrace.length > 0) {
logD("EglRenderer stack trace:");
for (StackTraceElement traceElem : renderStackTrace) {
logD(traceElem.toString());
}
}
}
}
}
/**
* Set if the video stream should be mirrored or not.
*/
public void setMirror(final boolean mirror) {
logD("setMirror: " + mirror);
synchronized (layoutLock) {
this.mirror = mirror;
}
}
/**
* Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
* Set this to 0 to disable cropping.
*/
public void setLayoutAspectRatio(float layoutAspectRatio) {
logD("setLayoutAspectRatio: " + layoutAspectRatio);
synchronized (layoutLock) {
this.layoutAspectRatio = layoutAspectRatio;
}
}
/**
* Limit render framerate.
*
* @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
* reduction.
*/
public void setFpsReduction(float fps) {
logD("setFpsReduction: " + fps);
synchronized (fpsReductionLock) {
final long previousRenderPeriodNs = minRenderPeriodNs;
if (fps <= 0) {
minRenderPeriodNs = Long.MAX_VALUE;
} else {
minRenderPeriodNs = (long) (TimeUnit.SECONDS.toNanos(1) / fps);
}
if (minRenderPeriodNs != previousRenderPeriodNs) {
// Fps reduction changed - reset frame time.
nextFrameTimeNs = System.nanoTime();
}
}
}
public void disableFpsReduction() {
setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
}
public void pauseVideo() {
setFpsReduction(0 /* fps */);
}
/**
* Register a callback to be invoked when a new video frame has been received. This version uses
* the drawer of the EglRenderer that was passed in init.
*
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
* It should be lightweight and must not call removeFrameListener.
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
* required.
*/
public void addFrameListener(final FrameListener listener, final float scale) {
addFrameListener(listener, scale, null, false /* applyFpsReduction */);
}
/**
* Register a callback to be invoked when a new video frame has been received.
*
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
* It should be lightweight and must not call removeFrameListener.
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
* required.
* @param drawer Custom drawer to use for this frame listener or null to use the default one.
*/
public void addFrameListener(
final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
}
/**
* Register a callback to be invoked when a new video frame has been received.
*
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
* It should be lightweight and must not call removeFrameListener.
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
* required.
* @param drawer Custom drawer to use for this frame listener or null to use the default one.
* @param applyFpsReduction This callback will not be called for frames that have been dropped by
* FPS reduction.
*/
public void addFrameListener(final FrameListener listener, final float scale,
final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
postToRenderThread(() -> {
final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
frameListeners.add(
new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
});
}
/**
   * Remove any pending callback that was added with addFrameListener. If the callback is not in
   * the queue, nothing happens. It is ensured that the callback won't be called after this method
   * returns.
   *
   * @param listener The callback to remove.
*/
  public void removeFrameListener(final FrameListener listener) {
    final CountDownLatch latch = new CountDownLatch(1);
    synchronized (handlerLock) {
      if (renderThreadHandler == null) {
        // Already released - there is no render thread, so no pending callbacks.
        return;
      }
      if (Thread.currentThread() == renderThreadHandler.getLooper().getThread()) {
        throw new RuntimeException("removeFrameListener must not be called on the render thread.");
      }
      postToRenderThread(() -> {
        latch.countDown();
        final Iterator<FrameListenerAndParams> iter = frameListeners.iterator();
        while (iter.hasNext()) {
          if (iter.next().listener == listener) {
            iter.remove();
          }
        }
      });
    }
    ThreadUtils.awaitUninterruptibly(latch);
  }
// VideoRenderer.Callbacks interface.
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
VideoFrame videoFrame = frame.toVideoFrame();
onFrame(videoFrame);
videoFrame.release();
}
// VideoSink interface.
@Override
public void onFrame(VideoFrame frame) {
synchronized (statisticsLock) {
++framesReceived;
}
final boolean dropOldFrame;
synchronized (handlerLock) {
if (renderThreadHandler == null) {
logD("Dropping frame - Not initialized or already released.");
return;
}
synchronized (frameLock) {
dropOldFrame = (pendingFrame != null);
if (dropOldFrame) {
pendingFrame.release();
}
pendingFrame = frame;
pendingFrame.retain();
        renderThreadHandler.post(this::renderFrameOnRenderThread);
}
}
if (dropOldFrame) {
synchronized (statisticsLock) {
++framesDropped;
}
}
}
/**
* Release EGL surface. This function will block until the EGL surface is released.
*/
public void releaseEglSurface(final Runnable completionCallback) {
// Ensure that the render thread is no longer touching the Surface before returning from this
// function.
eglSurfaceCreationRunnable.setSurface(null /* surface */);
synchronized (handlerLock) {
if (renderThreadHandler != null) {
renderThreadHandler.removeCallbacks(eglSurfaceCreationRunnable);
renderThreadHandler.postAtFrontOfQueue(() -> {
if (eglBase != null) {
eglBase.detachCurrent();
eglBase.releaseSurface();
}
completionCallback.run();
});
return;
}
}
completionCallback.run();
}
/**
* Private helper function to post tasks safely.
*/
private void postToRenderThread(Runnable runnable) {
synchronized (handlerLock) {
if (renderThreadHandler != null) {
renderThreadHandler.post(runnable);
}
}
}
private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
if (eglBase != null && eglBase.hasSurface()) {
logD("clearSurface");
GLES20.glClearColor(r, g, b, a);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
eglBase.swapBuffers();
}
}
/**
* Post a task to clear the surface to a transparent uniform color.
*/
public void clearImage() {
clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
}
/**
* Post a task to clear the surface to a specific color.
*/
public void clearImage(final float r, final float g, final float b, final float a) {
synchronized (handlerLock) {
if (renderThreadHandler == null) {
return;
}
renderThreadHandler.postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
}
}
/**
* Renders and releases |pendingFrame|.
*/
private void renderFrameOnRenderThread() {
// Fetch and render |pendingFrame|.
final VideoFrame frame;
synchronized (frameLock) {
if (pendingFrame == null) {
return;
}
frame = pendingFrame;
pendingFrame = null;
}
if (eglBase == null || !eglBase.hasSurface()) {
logD("Dropping frame - No surface");
frame.release();
return;
}
// Check if fps reduction is active.
final boolean shouldRenderFrame;
synchronized (fpsReductionLock) {
if (minRenderPeriodNs == Long.MAX_VALUE) {
// Rendering is paused.
shouldRenderFrame = false;
} else if (minRenderPeriodNs <= 0) {
// FPS reduction is disabled.
shouldRenderFrame = true;
} else {
final long currentTimeNs = System.nanoTime();
if (currentTimeNs < nextFrameTimeNs) {
logD("Skipping frame rendering - fps reduction is active.");
shouldRenderFrame = false;
} else {
nextFrameTimeNs += minRenderPeriodNs;
// The time for the next frame should always be in the future.
nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs);
shouldRenderFrame = true;
}
}
}
final long startTimeNs = System.nanoTime();
final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
final float drawnAspectRatio;
synchronized (layoutLock) {
drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
}
final float scaleX;
final float scaleY;
if (frameAspectRatio > drawnAspectRatio) {
scaleX = drawnAspectRatio / frameAspectRatio;
scaleY = 1f;
} else {
scaleX = 1f;
scaleY = frameAspectRatio / drawnAspectRatio;
}
drawMatrix.reset();
drawMatrix.preTranslate(0.5f, 0.5f);
if (mirror)
drawMatrix.preScale(-1f, 1f);
drawMatrix.preScale(scaleX, scaleY);
drawMatrix.preTranslate(-0.5f, -0.5f);
if (shouldRenderFrame) {
GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
eglBase.surfaceWidth(), eglBase.surfaceHeight());
final long swapBuffersStartTimeNs = System.nanoTime();
eglBase.swapBuffers();
final long currentTimeNs = System.nanoTime();
synchronized (statisticsLock) {
++framesRendered;
renderTimeNs += (currentTimeNs - startTimeNs);
renderSwapBufferTimeNs += (currentTimeNs - swapBuffersStartTimeNs);
}
}
notifyCallbacks(frame, shouldRenderFrame);
frame.release();
}
private void notifyCallbacks(VideoFrame frame, boolean wasRendered) {
if (frameListeners.isEmpty())
return;
drawMatrix.reset();
drawMatrix.preTranslate(0.5f, 0.5f);
if (mirror)
drawMatrix.preScale(-1f, 1f);
drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
drawMatrix.preTranslate(-0.5f, -0.5f);
Iterator<FrameListenerAndParams> it = frameListeners.iterator();
while (it.hasNext()) {
FrameListenerAndParams listenerAndParams = it.next();
if (!wasRendered && listenerAndParams.applyFpsReduction) {
continue;
}
it.remove();
final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth());
final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight());
if (scaledWidth == 0 || scaledHeight == 0) {
listenerAndParams.listener.onFrame(null);
continue;
}
if (bitmapTextureFramebuffer == null) {
bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
}
bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
0 /* viewportY */, scaledWidth, scaledHeight);
final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
GLES20.glReadPixels(
0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(bitmapBuffer);
listenerAndParams.listener.onFrame(bitmap);
}
}
private String averageTimeAsString(long sumTimeNs, int count) {
return (count <= 0) ? "NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " μs";
}
private void logStatistics() {
final long currentTimeNs = System.nanoTime();
synchronized (statisticsLock) {
final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs;
if (elapsedTimeNs <= 0) {
return;
}
final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs;
logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms."
+ " Frames received: " + framesReceived + "."
+ " Dropped: " + framesDropped + "."
+ " Rendered: " + framesRendered + "."
+ " Render fps: " + String.format(Locale.US, "%.1f", renderFps) + "."
+ " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "."
+ " Average swapBuffer time: "
+ averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + ".");
resetStatistics(currentTimeNs);
}
}
private void logD(String string) {
Logging.d(TAG, name + string);
}
}
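
// Example (not part of this commit): a minimal sketch of the EglRenderer lifecycle above,
// shown without its enclosing class and imports for brevity. It assumes the single-String-name
// constructor and the init(EglBase.Context, int[], RendererCommon.GlDrawer) overload defined
// earlier in this file; `sharedContext`, `outputSurface` and `frame` are illustrative
// placeholders.
static void exampleEglRendererLifecycle(
    EglBase.Context sharedContext, Surface outputSurface, VideoFrame frame) {
  final EglRenderer renderer = new EglRenderer("Example: ");
  // init() starts the render thread and creates the EGL context on it.
  renderer.init(sharedContext, EglBase.CONFIG_PLAIN, new GlRectDrawer());
  renderer.createEglSurface(outputSurface);
  // Frames delivered through the VideoSink interface are rendered asynchronously
  // on the render thread.
  renderer.onFrame(frame);
  // Release GL resources before the Activity is destroyed, while the EGLContext
  // is still valid.
  renderer.release();
}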

View File

@ -0,0 +1,137 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
/**
* An encoded frame from a video stream. Used as an input for decoders and as an output for
* encoders.
*/
public class EncodedImage {
// Must be kept in sync with common_types.h FrameType.
public enum FrameType {
EmptyFrame(0),
VideoFrameKey(3),
VideoFrameDelta(4);
private final int nativeIndex;
private FrameType(int nativeIndex) {
this.nativeIndex = nativeIndex;
}
public int getNative() {
return nativeIndex;
}
public static FrameType fromNative(int nativeIndex) {
for (FrameType type : FrameType.values()) {
if (type.nativeIndex == nativeIndex) {
return type;
}
}
throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex);
}
}
public final ByteBuffer buffer;
public final int encodedWidth;
public final int encodedHeight;
public final long captureTimeMs; // Deprecated
public final long captureTimeNs;
public final FrameType frameType;
public final int rotation;
public final boolean completeFrame;
public final Integer qp;
private EncodedImage(ByteBuffer buffer, int encodedWidth, int encodedHeight, long captureTimeNs,
FrameType frameType, int rotation, boolean completeFrame, Integer qp) {
this.buffer = buffer;
this.encodedWidth = encodedWidth;
this.encodedHeight = encodedHeight;
this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
this.captureTimeNs = captureTimeNs;
this.frameType = frameType;
this.rotation = rotation;
this.completeFrame = completeFrame;
this.qp = qp;
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private ByteBuffer buffer;
private int encodedWidth;
private int encodedHeight;
private long captureTimeNs;
private EncodedImage.FrameType frameType;
private int rotation;
private boolean completeFrame;
private Integer qp;
private Builder() {}
public Builder setBuffer(ByteBuffer buffer) {
this.buffer = buffer;
return this;
}
public Builder setEncodedWidth(int encodedWidth) {
this.encodedWidth = encodedWidth;
return this;
}
public Builder setEncodedHeight(int encodedHeight) {
this.encodedHeight = encodedHeight;
return this;
}
@Deprecated
public Builder setCaptureTimeMs(long captureTimeMs) {
this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
return this;
}
public Builder setCaptureTimeNs(long captureTimeNs) {
this.captureTimeNs = captureTimeNs;
return this;
}
public Builder setFrameType(EncodedImage.FrameType frameType) {
this.frameType = frameType;
return this;
}
public Builder setRotation(int rotation) {
this.rotation = rotation;
return this;
}
public Builder setCompleteFrame(boolean completeFrame) {
this.completeFrame = completeFrame;
return this;
}
public Builder setQp(Integer qp) {
this.qp = qp;
return this;
}
public EncodedImage createEncodedImage() {
return new EncodedImage(buffer, encodedWidth, encodedHeight, captureTimeNs, frameType,
rotation, completeFrame, qp);
}
}
}
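
// Example (not part of this commit): assembling an EncodedImage with the builder above.
// The payload bytes and the 640x480 dimensions are placeholders.
static EncodedImage exampleKeyFrame(byte[] encodedBytes) {
  final ByteBuffer payload = ByteBuffer.allocateDirect(encodedBytes.length);
  payload.put(encodedBytes);
  payload.flip();
  return EncodedImage.builder()
      .setBuffer(payload)
      .setEncodedWidth(640)
      .setEncodedHeight(480)
      .setCaptureTimeNs(System.nanoTime())
      .setFrameType(EncodedImage.FrameType.VideoFrameKey)
      .setRotation(0)
      .setCompleteFrame(true)
      .setQp(null) // QP unknown.
      .createEncodedImage();
}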

View File

@ -0,0 +1,215 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.os.SystemClock;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
public class FileVideoCapturer implements VideoCapturer {
static {
System.loadLibrary("jingle_peerconnection_so");
}
private interface VideoReader {
int getFrameWidth();
int getFrameHeight();
byte[] getNextFrame();
void close();
}
  /**
   * Reads video data from a file in the .y4m container format.
   */
private static class VideoReaderY4M implements VideoReader {
    private static final String TAG = "VideoReaderY4M";
private final int frameWidth;
private final int frameHeight;
private final int frameSize;
// First char after header
private final long videoStart;
private static final String Y4M_FRAME_DELIMETER = "FRAME";
private final RandomAccessFile mediaFileStream;
public int getFrameWidth() {
return frameWidth;
}
public int getFrameHeight() {
return frameHeight;
}
public VideoReaderY4M(String file) throws IOException {
mediaFileStream = new RandomAccessFile(file, "r");
StringBuilder builder = new StringBuilder();
for (;;) {
int c = mediaFileStream.read();
if (c == -1) {
// End of file reached.
throw new RuntimeException("Found end of file before end of header for file: " + file);
}
if (c == '\n') {
// End of header found.
break;
}
builder.append((char) c);
}
videoStart = mediaFileStream.getFilePointer();
String header = builder.toString();
String[] headerTokens = header.split("[ ]");
int w = 0;
int h = 0;
String colorSpace = "";
for (String tok : headerTokens) {
char c = tok.charAt(0);
switch (c) {
case 'W':
w = Integer.parseInt(tok.substring(1));
break;
case 'H':
h = Integer.parseInt(tok.substring(1));
break;
case 'C':
colorSpace = tok.substring(1);
break;
}
}
Logging.d(TAG, "Color space: " + colorSpace);
if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) {
        throw new IllegalArgumentException(
            "Does not support any color space other than I420 or I420mpeg2");
}
if ((w % 2) == 1 || (h % 2) == 1) {
throw new IllegalArgumentException("Does not support odd width or height");
}
frameWidth = w;
frameHeight = h;
frameSize = w * h * 3 / 2;
Logging.d(TAG, "frame dim: (" + w + ", " + h + ") frameSize: " + frameSize);
}
public byte[] getNextFrame() {
byte[] frame = new byte[frameSize];
try {
byte[] frameDelim = new byte[Y4M_FRAME_DELIMETER.length() + 1];
if (mediaFileStream.read(frameDelim) < frameDelim.length) {
          // End of file reached - loop back to the start of the video.
mediaFileStream.seek(videoStart);
if (mediaFileStream.read(frameDelim) < frameDelim.length) {
throw new RuntimeException("Error looping video");
}
}
String frameDelimStr = new String(frameDelim);
if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
          throw new RuntimeException(
              "Frames should be delimited by FRAME plus newline, found delimiter was: '"
              + frameDelimStr + "'");
}
mediaFileStream.readFully(frame);
byte[] nv21Frame = new byte[frameSize];
nativeI420ToNV21(frame, frameWidth, frameHeight, nv21Frame);
return nv21Frame;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public void close() {
try {
mediaFileStream.close();
} catch (IOException e) {
Logging.e(TAG, "Problem closing file", e);
}
}
}
  private static final String TAG = "FileVideoCapturer";
private final VideoReader videoReader;
private CapturerObserver capturerObserver;
private final Timer timer = new Timer();
private final TimerTask tickTask = new TimerTask() {
@Override
public void run() {
tick();
}
};
private int getFrameWidth() {
return videoReader.getFrameWidth();
}
private int getFrameHeight() {
return videoReader.getFrameHeight();
}
public FileVideoCapturer(String inputFile) throws IOException {
try {
videoReader = new VideoReaderY4M(inputFile);
} catch (IOException e) {
Logging.d(TAG, "Could not open video file: " + inputFile);
throw e;
}
}
private byte[] getNextFrame() {
return videoReader.getNextFrame();
}
public void tick() {
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
byte[] frameData = getNextFrame();
capturerObserver.onByteBufferFrameCaptured(
frameData, getFrameWidth(), getFrameHeight(), 0, captureTimeNs);
}
@Override
public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
CapturerObserver capturerObserver) {
this.capturerObserver = capturerObserver;
}
@Override
public void startCapture(int width, int height, int framerate) {
timer.schedule(tickTask, 0, 1000 / framerate);
}
@Override
public void stopCapture() throws InterruptedException {
timer.cancel();
}
@Override
public void changeCaptureFormat(int width, int height, int framerate) {
// Empty on purpose
}
@Override
public void dispose() {
videoReader.close();
}
@Override
public boolean isScreencast() {
return false;
}
public static native void nativeI420ToNV21(byte[] src, int width, int height, byte[] dst);
}
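
// Example (not part of this commit): driving FileVideoCapturer at 30 fps. A
// SurfaceTextureHelper, Context and VideoCapturer.CapturerObserver are assumed to be
// created elsewhere (e.g. by the PeerConnection machinery); the file path is a placeholder.
static void exampleFileCapture(SurfaceTextureHelper helper, Context appContext,
    VideoCapturer.CapturerObserver observer) throws IOException, InterruptedException {
  final FileVideoCapturer capturer = new FileVideoCapturer("/sdcard/test.y4m");
  capturer.initialize(helper, appContext, observer);
  // Width and height are ignored - the frame size comes from the .y4m header;
  // only the framerate is used to schedule the timer.
  capturer.startCapture(0 /* width */, 0 /* height */, 30 /* framerate */);
  // ... NV21 frames arrive via observer.onByteBufferFrameCaptured() ...
  capturer.stopCapture();
  capturer.dispose();
}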

View File

@ -0,0 +1,210 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import java.util.IdentityHashMap;
import java.util.Map;
/**
 * Helper class to draw an opaque quad on the target viewport location. Rotation, mirror, and
 * cropping are specified using a 4x4 texture coordinate transform matrix. The frame input can
 * either be an OES texture or YUV textures in I420 format. The GL state must be preserved between
 * draw calls; this is intentional, to maximize performance. The function release() must be called
 * manually to free the resources held by this object.
 */
public class GlRectDrawer implements RendererCommon.GlDrawer {
// clang-format off
// Simple vertex shader, used for both YUV and OES.
private static final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n"
+ "attribute vec4 in_pos;\n"
+ "attribute vec4 in_tc;\n"
+ "\n"
+ "uniform mat4 texMatrix;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = in_pos;\n"
+ " interp_tc = (texMatrix * in_tc).xy;\n"
+ "}\n";
private static final String YUV_FRAGMENT_SHADER_STRING =
"precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform sampler2D y_tex;\n"
+ "uniform sampler2D u_tex;\n"
+ "uniform sampler2D v_tex;\n"
+ "\n"
+ "void main() {\n"
// CSC according to http://www.fourcc.org/fccyvrgb.php
+ " float y = texture2D(y_tex, interp_tc).r;\n"
+ " float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
+ " float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
+ " gl_FragColor = vec4(y + 1.403 * v, "
+ " y - 0.344 * u - 0.714 * v, "
+ " y + 1.77 * u, 1);\n"
+ "}\n";
private static final String RGB_FRAGMENT_SHADER_STRING =
"precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform sampler2D rgb_tex;\n"
+ "\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(rgb_tex, interp_tc);\n"
+ "}\n";
private static final String OES_FRAGMENT_SHADER_STRING =
"#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform samplerExternalOES oes_tex;\n"
+ "\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(oes_tex, interp_tc);\n"
+ "}\n";
// clang-format on
// Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
// top-right.
private static final FloatBuffer FULL_RECTANGLE_BUF = GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
});
private static class Shader {
public final GlShader glShader;
public final int texMatrixLocation;
public Shader(String fragmentShader) {
this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
}
}
  // The keys are the fragment shader source strings defined above.
private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
/**
* Draw an OES texture frame with specified texture transformation matrix. Required resources are
* allocated at the first call to this function.
*/
@Override
public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    // updateTexImage() may be called from another thread in another EGL context, so we need to
    // bind/unbind the texture in each draw call so that GLES understands it's a new texture.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
}
/**
* Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
* are allocated at the first call to this function.
*/
@Override
public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
// Unbind the texture as a precaution.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
/**
* Draw a YUV frame with specified texture transformation matrix. Required resources are
* allocated at the first call to this function.
*/
@Override
public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
// Bind the textures.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
}
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
    // Unbind the textures as a precaution.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
}
private void drawRectangle(int x, int y, int width, int height) {
// Draw quad.
GLES20.glViewport(x, y, width, height);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
private void prepareShader(String fragmentShader, float[] texMatrix) {
final Shader shader;
if (shaders.containsKey(fragmentShader)) {
shader = shaders.get(fragmentShader);
} else {
// Lazy allocation.
shader = new Shader(fragmentShader);
shaders.put(fragmentShader, shader);
shader.glShader.useProgram();
// Initialize fragment shader uniform values.
if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
} else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
} else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
} else {
throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
}
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
// Initialize vertex shader attributes.
shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
}
shader.glShader.useProgram();
// Copy the texture transformation matrix over.
GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
}
/**
* Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
*/
@Override
public void release() {
for (Shader shader : shaders.values()) {
shader.glShader.release();
}
shaders.clear();
}
}
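
// Example (not part of this commit): drawing an OES texture to the full viewport with an
// identity texture matrix (no rotation, mirror or cropping). A current EGL context and a
// valid `oesTextureId` are assumed; the sketch is shown without its enclosing class.
static void exampleDrawOes(int oesTextureId, int frameWidth, int frameHeight,
    int surfaceWidth, int surfaceHeight) {
  final GlRectDrawer drawer = new GlRectDrawer();
  final float[] identityMatrix = new float[] {
      1f, 0f, 0f, 0f,
      0f, 1f, 0f, 0f,
      0f, 0f, 1f, 0f,
      0f, 0f, 0f, 1f};
  drawer.drawOes(oesTextureId, identityMatrix, frameWidth, frameHeight,
      0 /* viewportX */, 0 /* viewportY */, surfaceWidth, surfaceHeight);
  // Shaders are allocated lazily on the first draw call; release them when done.
  drawer.release();
}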

View File

@ -0,0 +1,121 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
// Helper class for handling OpenGL shaders and shader programs.
public class GlShader {
private static final String TAG = "GlShader";
private static int compileShader(int shaderType, String source) {
final int shader = GLES20.glCreateShader(shaderType);
if (shader == 0) {
throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
}
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compileStatus = new int[] {GLES20.GL_FALSE};
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] != GLES20.GL_TRUE) {
Logging.e(
TAG, "Could not compile shader " + shaderType + ":" + GLES20.glGetShaderInfoLog(shader));
throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
}
GlUtil.checkNoGLES2Error("compileShader");
return shader;
}
private int program;
public GlShader(String vertexSource, String fragmentSource) {
final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
program = GLES20.glCreateProgram();
if (program == 0) {
throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
}
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
int[] linkStatus = new int[] {GLES20.GL_FALSE};
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
}
// According to the documentation of glLinkProgram():
// "After the link operation, applications are free to modify attached shader objects, compile
// attached shader objects, detach shader objects, delete shader objects, and attach additional
// shader objects. None of these operations affects the information log or the program that is
// part of the program object."
    // But in practice, detaching shaders from the program seems to break some devices. Deleting
    // the shaders is fine, however - they will be deleted once they are no longer attached to any
    // program.
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
GlUtil.checkNoGLES2Error("Creating GlShader");
}
public int getAttribLocation(String label) {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
int location = GLES20.glGetAttribLocation(program, label);
if (location < 0) {
throw new RuntimeException("Could not locate '" + label + "' in program");
}
return location;
}
/**
* Enable and upload a vertex array for attribute |label|. The vertex data is specified in
* |buffer| with |dimension| number of components per vertex.
*/
public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
int location = getAttribLocation(label);
GLES20.glEnableVertexAttribArray(location);
GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, 0, buffer);
GlUtil.checkNoGLES2Error("setVertexAttribArray");
}
public int getUniformLocation(String label) {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
int location = GLES20.glGetUniformLocation(program, label);
if (location < 0) {
throw new RuntimeException("Could not locate uniform '" + label + "' in program");
}
return location;
}
public void useProgram() {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
GLES20.glUseProgram(program);
GlUtil.checkNoGLES2Error("glUseProgram");
}
public void release() {
Logging.d(TAG, "Deleting shader.");
// Delete program, automatically detaching any shaders from it.
if (program != -1) {
GLES20.glDeleteProgram(program);
program = -1;
}
}
}
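
// Example (not part of this commit): compiling a trivial RGB program with GlShader and wiring
// up its inputs. The shader sources and vertex buffers are assumed to follow the conventions
// of GlRectDrawer above; must run on a thread with a current EGL context.
static void exampleShaderSetup(String vertexSource, String rgbFragmentSource,
    FloatBuffer positions, FloatBuffer texCoords) {
  final GlShader shader = new GlShader(vertexSource, rgbFragmentSource);
  shader.useProgram();
  // Point the sampler uniform at texture unit 0.
  GLES20.glUniform1i(shader.getUniformLocation("rgb_tex"), 0);
  // Upload two-component vertex positions and texture coordinates.
  shader.setVertexAttribArray("in_pos", 2, positions);
  shader.setVertexAttribArray("in_tc", 2, texCoords);
  // ... glDrawArrays() etc. ...
  shader.release();
}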

View File

@ -0,0 +1,117 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES20;
/**
* Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
* buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
* conversion.
*/
// TODO(magjed): Add unittests for this class.
public class GlTextureFrameBuffer {
private final int frameBufferId;
private final int textureId;
private final int pixelFormat;
private int width;
private int height;
/**
* Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
* when calling this function. The framebuffer is not complete until setSize() is called.
*/
public GlTextureFrameBuffer(int pixelFormat) {
switch (pixelFormat) {
case GLES20.GL_LUMINANCE:
case GLES20.GL_RGB:
case GLES20.GL_RGBA:
this.pixelFormat = pixelFormat;
break;
default:
throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
}
// Create texture.
textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
this.width = 0;
this.height = 0;
// Create framebuffer object.
    final int[] frameBuffers = new int[1];
GLES20.glGenFramebuffers(1, frameBuffers, 0);
frameBufferId = frameBuffers[0];
}
/**
* (Re)allocate texture. Will do nothing if the requested size equals the current size. An
* EGLContext must be bound on the current thread when calling this function. Must be called at
* least once before using the framebuffer. May be called multiple times to change size.
*/
public void setSize(int width, int height) {
if (width == 0 || height == 0) {
throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
}
if (width == this.width && height == this.height) {
return;
}
this.width = width;
this.height = height;
// Allocate texture.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize");
// Attach the texture to the framebuffer as color attachment.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
GLES20.glFramebufferTexture2D(
GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
// Check that the framebuffer is in a good state.
final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
throw new IllegalStateException("Framebuffer not complete, status: " + status);
}
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
public int getWidth() {
return width;
}
public int getHeight() {
return height;
}
public int getFrameBufferId() {
return frameBufferId;
}
public int getTextureId() {
return textureId;
}
/**
* Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
* this function. This object should not be used after this call.
*/
public void release() {
GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
width = 0;
height = 0;
}
}
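
// Example (not part of this commit): the off-screen render-to-texture pattern this class
// supports, mirroring how EglRenderer.notifyCallbacks() uses it above. A current EGL
// context is assumed.
static void exampleRenderToTexture(int width, int height) {
  final GlTextureFrameBuffer fbo = new GlTextureFrameBuffer(GLES20.GL_RGBA);
  fbo.setSize(width, height); // (Re)allocates the backing texture.
  GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo.getFrameBufferId());
  // ... draw into the framebuffer, e.g. with GlRectDrawer ...
  GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
  // The result can now be sampled (or read back) from fbo.getTextureId().
  fbo.release();
}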

View File

@ -0,0 +1,58 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* Some OpenGL static utility functions.
*/
public class GlUtil {
private GlUtil() {}
// Assert that no OpenGL ES 2.0 error has been raised.
public static void checkNoGLES2Error(String msg) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
throw new RuntimeException(msg + ": GLES20 error: " + error);
}
}
public static FloatBuffer createFloatBuffer(float[] coords) {
// Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
bb.order(ByteOrder.nativeOrder());
FloatBuffer fb = bb.asFloatBuffer();
fb.put(coords);
fb.position(0);
return fb;
}
/**
* Generate texture with standard parameters.
*/
public static int generateTexture(int target) {
    final int[] textureArray = new int[1];
GLES20.glGenTextures(1, textureArray, 0);
final int textureId = textureArray[0];
GLES20.glBindTexture(target, textureId);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
checkNoGLES2Error("generateTexture");
return textureId;
}
}
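
// Example (not part of this commit): the helpers above in combination - allocate a 2D texture
// with the standard parameters and build a vertex buffer for a full-screen quad. Requires a
// current EGL context; values are illustrative.
static int exampleTextureSetup() {
  final int textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
  // Positions for a full-screen triangle strip; this buffer would typically be
  // passed to GlShader.setVertexAttribArray().
  final FloatBuffer quad = GlUtil.createFloatBuffer(
      new float[] {-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f});
  GlUtil.checkNoGLES2Error("exampleTextureSetup");
  return textureId;
}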

View File

@ -0,0 +1,117 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
import static org.webrtc.MediaCodecUtils.INTEL_PREFIX;
import static org.webrtc.MediaCodecUtils.NVIDIA_PREFIX;
import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.os.Build;
/** Factory for Android hardware VideoDecoders. */
@SuppressWarnings("deprecation") // API level 16 requires use of deprecated methods.
public class HardwareVideoDecoderFactory implements VideoDecoderFactory {
private static final String TAG = "HardwareVideoDecoderFactory";
private final EglBase.Context sharedContext;
/** Creates a HardwareVideoDecoderFactory that does not use surface textures. */
@Deprecated // Not removed yet to avoid breaking callers.
public HardwareVideoDecoderFactory() {
this(null);
}
/**
* Creates a HardwareVideoDecoderFactory that supports surface texture rendering using the given
* shared context. The context may be null. If it is null, then surface support is disabled.
*/
public HardwareVideoDecoderFactory(EglBase.Context sharedContext) {
this.sharedContext = sharedContext;
}
@Override
public VideoDecoder createDecoder(String codecType) {
VideoCodecType type = VideoCodecType.valueOf(codecType);
MediaCodecInfo info = findCodecForType(type);
if (info == null) {
return null; // No support for this codec type.
}
CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType());
return new HardwareVideoDecoder(info.getName(), type,
MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities),
sharedContext);
}
private MediaCodecInfo findCodecForType(VideoCodecType type) {
// HW decoding is not supported on builds before KITKAT.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null;
}
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = null;
try {
info = MediaCodecList.getCodecInfoAt(i);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve encoder codec info", e);
}
if (info == null || info.isEncoder()) {
continue;
}
if (isSupportedCodec(info, type)) {
return info;
}
}
return null; // No support for this type.
}
  // Returns true if the given MediaCodecInfo indicates a supported decoder for the given type.
private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecType type) {
if (!MediaCodecUtils.codecSupportsType(info, type)) {
return false;
}
// Check for a supported color format.
if (MediaCodecUtils.selectColorFormat(
MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
== null) {
return false;
}
return isHardwareSupported(info, type);
}
private boolean isHardwareSupported(MediaCodecInfo info, VideoCodecType type) {
String name = info.getName();
switch (type) {
case VP8:
// QCOM, Intel, Exynos, and Nvidia all supported for VP8.
return name.startsWith(QCOM_PREFIX) || name.startsWith(INTEL_PREFIX)
|| name.startsWith(EXYNOS_PREFIX) || name.startsWith(NVIDIA_PREFIX);
case VP9:
// QCOM and Exynos supported for VP9.
return name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX);
case H264:
// QCOM, Intel, and Exynos supported for H264.
return name.startsWith(QCOM_PREFIX) || name.startsWith(INTEL_PREFIX)
|| name.startsWith(EXYNOS_PREFIX);
default:
return false;
}
}
}
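
// Example (not part of this commit): querying the factory above for a VP8 decoder.
// `eglContext` may be null, which disables surface texture rendering.
static VideoDecoder exampleCreateVp8Decoder(EglBase.Context eglContext) {
  final VideoDecoderFactory factory = new HardwareVideoDecoderFactory(eglContext);
  final VideoDecoder decoder = factory.createDecoder("VP8");
  // A null return means no hardware VP8 support on this device/SDK; callers
  // would fall back to a software decoder.
  return decoder;
}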

View File

@ -0,0 +1,247 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
import static org.webrtc.MediaCodecUtils.INTEL_PREFIX;
import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** Factory for Android hardware video encoders. */
@SuppressWarnings("deprecation") // API 16 requires the use of deprecated methods.
public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
private static final String TAG = "HardwareVideoEncoderFactory";
// Forced key frame interval - used to reduce color distortions on Qualcomm platforms.
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000;
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000;
  // List of devices with poor H.264 encoder quality.
  // The HW H.264 encoder on the devices below has poor bitrate control - the actual
  // bitrate deviates a lot from the target value.
private static final List<String> H264_HW_EXCEPTION_MODELS =
Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4");
private final EglBase14.Context sharedContext;
private final boolean enableIntelVp8Encoder;
private final boolean enableH264HighProfile;
public HardwareVideoEncoderFactory(
EglBase.Context sharedContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
// Texture mode requires EglBase14.
if (sharedContext instanceof EglBase14.Context) {
this.sharedContext = (EglBase14.Context) sharedContext;
} else {
Logging.w(TAG, "No shared EglBase.Context. Encoders will not use texture mode.");
this.sharedContext = null;
}
this.enableIntelVp8Encoder = enableIntelVp8Encoder;
this.enableH264HighProfile = enableH264HighProfile;
}
@Deprecated
public HardwareVideoEncoderFactory(boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
this(null, enableIntelVp8Encoder, enableH264HighProfile);
}
@Override
public VideoEncoder createEncoder(VideoCodecInfo input) {
VideoCodecType type = VideoCodecType.valueOf(input.name);
MediaCodecInfo info = findCodecForType(type);
if (info == null) {
return null; // No support for this type.
}
String codecName = info.getName();
String mime = type.mimeType();
int colorFormat = MediaCodecUtils.selectColorFormat(sharedContext == null
? MediaCodecUtils.ENCODER_COLOR_FORMATS
: MediaCodecUtils.TEXTURE_COLOR_FORMATS,
info.getCapabilitiesForType(mime));
return new HardwareVideoEncoder(codecName, type, colorFormat, input.params,
getKeyFrameIntervalSec(type), getForcedKeyFrameIntervalMs(type, codecName),
createBitrateAdjuster(type, codecName), sharedContext);
}
@Override
public VideoCodecInfo[] getSupportedCodecs() {
List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
// Generate a list of supported codecs in order of preference:
// VP8, VP9, H264 (high profile), and H264 (baseline profile).
for (VideoCodecType type :
new VideoCodecType[] {VideoCodecType.VP8, VideoCodecType.VP9, VideoCodecType.H264}) {
MediaCodecInfo codec = findCodecForType(type);
if (codec != null) {
String name = type.name();
if (type == VideoCodecType.H264 && isH264HighProfileSupported(codec)) {
supportedCodecInfos.add(new VideoCodecInfo(0, name, getCodecProperties(type, true)));
}
supportedCodecInfos.add(new VideoCodecInfo(0, name, getCodecProperties(type, false)));
}
}
return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
}
private MediaCodecInfo findCodecForType(VideoCodecType type) {
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = null;
try {
info = MediaCodecList.getCodecInfoAt(i);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve encoder codec info", e);
}
if (info == null || !info.isEncoder()) {
continue;
}
if (isSupportedCodec(info, type)) {
return info;
}
}
return null; // No support for this type.
}
// Returns true if the given MediaCodecInfo indicates a supported encoder for the given type.
private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecType type) {
if (!MediaCodecUtils.codecSupportsType(info, type)) {
return false;
}
// Check for a supported color format.
if (MediaCodecUtils.selectColorFormat(sharedContext == null
? MediaCodecUtils.ENCODER_COLOR_FORMATS
: MediaCodecUtils.TEXTURE_COLOR_FORMATS,
info.getCapabilitiesForType(type.mimeType()))
== null) {
return false;
}
return isHardwareSupportedInCurrentSdk(info, type);
}
// Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the
// current SDK.
private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecType type) {
switch (type) {
case VP8:
return isHardwareSupportedInCurrentSdkVp8(info);
case VP9:
return isHardwareSupportedInCurrentSdkVp9(info);
case H264:
return isHardwareSupportedInCurrentSdkH264(info);
}
return false;
}
private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
String name = info.getName();
// QCOM Vp8 encoder is supported in KITKAT or later.
return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
// Exynos VP8 encoder is supported in M or later.
|| (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
// Intel Vp8 encoder is supported in LOLLIPOP or later, with the intel encoder enabled.
|| (name.startsWith(INTEL_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
&& enableIntelVp8Encoder);
}
private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
String name = info.getName();
return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX))
// Both QCOM and Exynos VP9 encoders are supported in N or later.
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
}
private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) {
// First, H264 hardware might perform poorly on this model.
if (H264_HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
return false;
}
String name = info.getName();
// QCOM H264 encoder is supported in KITKAT or later.
return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
// Exynos H264 encoder is supported in LOLLIPOP or later.
|| (name.startsWith(EXYNOS_PREFIX)
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP);
}
private int getKeyFrameIntervalSec(VideoCodecType type) {
switch (type) {
case VP8: // Fallthrough intended.
case VP9:
return 100;
case H264:
return 20;
}
throw new IllegalArgumentException("Unsupported VideoCodecType " + type);
}
private int getForcedKeyFrameIntervalMs(VideoCodecType type, String codecName) {
if (type == VideoCodecType.VP8 && codecName.startsWith(QCOM_PREFIX)) {
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP
|| Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
} else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
} else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
}
}
// Other codecs don't need key frame forcing.
return 0;
}
private BitrateAdjuster createBitrateAdjuster(VideoCodecType type, String codecName) {
if (codecName.startsWith(EXYNOS_PREFIX)) {
if (type == VideoCodecType.VP8) {
// Exynos VP8 encoders need dynamic bitrate adjustment.
return new DynamicBitrateAdjuster();
} else {
// Exynos VP9 and H264 encoders need framerate-based bitrate adjustment.
return new FramerateBitrateAdjuster();
}
}
// Other codecs don't need bitrate adjustment.
return new BaseBitrateAdjuster();
}
private boolean isH264HighProfileSupported(MediaCodecInfo info) {
return enableH264HighProfile && info.getName().startsWith(QCOM_PREFIX);
}
private Map<String, String> getCodecProperties(VideoCodecType type, boolean highProfile) {
switch (type) {
case VP8:
case VP9:
return new HashMap<String, String>();
case H264:
Map<String, String> properties = new HashMap<>();
properties.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
properties.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
properties.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
highProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1
: VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
return properties;
default:
throw new IllegalArgumentException("Unsupported codec: " + type);
}
}
}
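
// Example (not part of this commit): enumerating the hardware encoders the factory above
// exposes. The boolean flags mirror the constructor parameters; `eglContext` may be null
// to disable texture mode.
static void exampleListEncoders(EglBase.Context eglContext) {
  final VideoEncoderFactory factory = new HardwareVideoEncoderFactory(
      eglContext, true /* enableIntelVp8Encoder */, false /* enableH264HighProfile */);
  // Codecs are returned in preference order: VP8, VP9, H264 (high profile),
  // H264 (baseline profile).
  for (VideoCodecInfo codec : factory.getSupportedCodecs()) {
    final VideoEncoder encoder = factory.createEncoder(codec);
    Logging.d("Example", "Supported codec: " + codec.name + ", encoder: " + encoder);
  }
}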

View File

@ -0,0 +1,41 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Representation of a single ICE Candidate, mirroring
* {@code IceCandidateInterface} in the C++ API.
*/
public class IceCandidate {
public final String sdpMid;
public final int sdpMLineIndex;
public final String sdp;
public final String serverUrl;
public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
this.sdpMid = sdpMid;
this.sdpMLineIndex = sdpMLineIndex;
this.sdp = sdp;
this.serverUrl = "";
}
  // Only called internally from JNI.
private IceCandidate(String sdpMid, int sdpMLineIndex, String sdp, String serverUrl) {
this.sdpMid = sdpMid;
this.sdpMLineIndex = sdpMLineIndex;
this.sdp = sdp;
this.serverUrl = serverUrl;
}
  @Override
  public String toString() {
return sdpMid + ":" + sdpMLineIndex + ":" + sdp + ":" + serverUrl;
}
}

View File

@ -0,0 +1,751 @@
/*
* Copyright 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Build;
import android.os.SystemClock;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
// This class is an implementation detail of the Java PeerConnection API.
@SuppressWarnings("deprecation")
public class MediaCodecVideoDecoder {
// This class is constructed, operated, and destroyed by its C++ incarnation,
// so the class and its methods have non-public visibility. The API this
// class exposes aims to mimic the webrtc::VideoDecoder API as closely as
  // possible to minimize the amount of translation work necessary.
private static final String TAG = "MediaCodecVideoDecoder";
private static final long MAX_DECODE_TIME_MS = 200;
// TODO(magjed): Use MediaFormat constants when part of the public API.
private static final String FORMAT_KEY_STRIDE = "stride";
private static final String FORMAT_KEY_SLICE_HEIGHT = "slice-height";
private static final String FORMAT_KEY_CROP_LEFT = "crop-left";
private static final String FORMAT_KEY_CROP_RIGHT = "crop-right";
private static final String FORMAT_KEY_CROP_TOP = "crop-top";
private static final String FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
// Tracks webrtc::VideoCodecType.
public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
// Timeout for input buffer dequeue.
private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
// Timeout for codec releasing.
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
// Max number of output buffers queued before starting to drop decoded frames.
private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
// Active running decoder instance. Set in initDecode() (called from native code)
// and reset to null in release() call.
private static MediaCodecVideoDecoder runningInstance = null;
private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
private static int codecErrors = 0;
// List of disabled codec types - can be set from application.
private static Set<String> hwDecoderDisabledTypes = new HashSet<String>();
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] inputBuffers;
private ByteBuffer[] outputBuffers;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc";
// List of supported HW VP8 decoders.
private static final String[] supportedVp8HwCodecPrefixes = {
"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel."};
// List of supported HW VP9 decoders.
private static final String[] supportedVp9HwCodecPrefixes = {"OMX.qcom.", "OMX.Exynos."};
// List of supported HW H.264 decoders.
private static final String[] supportedH264HwCodecPrefixes = {
"OMX.qcom.", "OMX.Intel.", "OMX.Exynos."};
// List of supported HW H.264 high profile decoders.
private static final String supportedQcomH264HighProfileHwCodecPrefix = "OMX.qcom.";
private static final String supportedExynosH264HighProfileHwCodecPrefix = "OMX.Exynos.";
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02;
private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03;
private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Allowable color formats supported by codec - in order of preference.
private static final List<Integer> supportedColorList = Arrays.asList(
CodecCapabilities.COLOR_FormatYUV420Planar, CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
private int colorFormat;
private int width;
private int height;
private int stride;
private int sliceHeight;
private boolean hasDecodedFirstFrame;
private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>();
private boolean useSurface;
// The below variables are only used when decoding to a Surface.
private TextureListener textureListener;
private int droppedFrames;
private Surface surface = null;
private final Queue<DecodedOutputBuffer> dequeuedSurfaceOutputBuffers =
new LinkedList<DecodedOutputBuffer>();
  // MediaCodec error handler - invoked when a critical error happens that may prevent
  // further use of the MediaCodec API. Currently this means that one of the MediaCodec
  // instances is hanging and can no longer be used.
public static interface MediaCodecVideoDecoderErrorCallback {
void onMediaCodecVideoDecoderCriticalError(int codecErrors);
}
public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
Logging.d(TAG, "Set error callback");
MediaCodecVideoDecoder.errorCallback = errorCallback;
}
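  // Illustrative application-side usage (a hypothetical sketch; only setErrorCallback
  // itself is part of this file's API):
  //
  //   MediaCodecVideoDecoder.setErrorCallback(
  //       new MediaCodecVideoDecoder.MediaCodecVideoDecoderErrorCallback() {
  //         @Override
  //         public void onMediaCodecVideoDecoderCriticalError(int codecErrors) {
  //           // E.g. fall back to SW decoding after repeated HW failures.
  //         }
  //       });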
  // Functions to disable HW decoding - can be called from applications for platforms
  // that have known HW decoding problems.
public static void disableVp8HwCodec() {
Logging.w(TAG, "VP8 decoding is disabled by application.");
hwDecoderDisabledTypes.add(VP8_MIME_TYPE);
}
public static void disableVp9HwCodec() {
Logging.w(TAG, "VP9 decoding is disabled by application.");
hwDecoderDisabledTypes.add(VP9_MIME_TYPE);
}
public static void disableH264HwCodec() {
Logging.w(TAG, "H.264 decoding is disabled by application.");
hwDecoderDisabledTypes.add(H264_MIME_TYPE);
}
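  // Illustrative application-side usage (a hypothetical sketch; the model check is an
  // assumption), called once at startup before any decoder is created:
  //
  //   if (Build.MODEL.equals("SomeProblematicModel")) {
  //     MediaCodecVideoDecoder.disableVp8HwCodec();
  //     MediaCodecVideoDecoder.disableH264HwCodec();
  //   }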
// Functions to query if HW decoding is supported.
public static boolean isVp8HwSupported() {
return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE)
&& (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
}
public static boolean isVp9HwSupported() {
return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE)
&& (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
}
public static boolean isH264HwSupported() {
return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE)
&& (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
}
public static boolean isH264HighProfileHwSupported() {
if (hwDecoderDisabledTypes.contains(H264_MIME_TYPE)) {
return false;
}
// Support H.264 HP decoding on QCOM chips for Android L and above.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
&& findDecoder(H264_MIME_TYPE, new String[] {supportedQcomH264HighProfileHwCodecPrefix})
!= null) {
return true;
}
// Support H.264 HP decoding on Exynos chips for Android M and above.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
&& findDecoder(H264_MIME_TYPE, new String[] {supportedExynosH264HighProfileHwCodecPrefix})
!= null) {
return true;
}
return false;
}
public static void printStackTrace() {
if (runningInstance != null && runningInstance.mediaCodecThread != null) {
StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
if (mediaCodecStackTraces.length > 0) {
Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:");
for (StackTraceElement stackTrace : mediaCodecStackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
// Helper struct for findDecoder() below.
private static class DecoderProperties {
public DecoderProperties(String codecName, int colorFormat) {
this.codecName = codecName;
this.colorFormat = colorFormat;
}
    public final String codecName; // OpenMax component name for the decoder.
public final int colorFormat; // Color format supported by codec.
}
private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null; // MediaCodec.setParameters is missing.
}
Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = null;
try {
info = MediaCodecList.getCodecInfoAt(i);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve decoder codec info", e);
}
if (info == null || info.isEncoder()) {
continue;
}
String name = null;
for (String mimeType : info.getSupportedTypes()) {
if (mimeType.equals(mime)) {
name = info.getName();
break;
}
}
if (name == null) {
continue; // No HW support in this codec; try the next one.
}
Logging.d(TAG, "Found candidate decoder " + name);
      // Check if this is a supported decoder.
boolean supportedCodec = false;
for (String codecPrefix : supportedCodecPrefixes) {
if (name.startsWith(codecPrefix)) {
supportedCodec = true;
break;
}
}
if (!supportedCodec) {
continue;
}
// Check if codec supports either yuv420 or nv12.
CodecCapabilities capabilities;
try {
capabilities = info.getCapabilitiesForType(mime);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve decoder capabilities", e);
continue;
}
for (int colorFormat : capabilities.colorFormats) {
Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
for (int supportedColorFormat : supportedColorList) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
// Found supported HW decoder.
Logging.d(TAG, "Found target decoder " + name + ". Color: 0x"
+ Integer.toHexString(codecColorFormat));
return new DecoderProperties(name, codecColorFormat);
}
}
}
}
Logging.d(TAG, "No HW decoder found for mime " + mime);
return null; // No HW decoder.
}
private void checkOnMediaCodecThread() throws IllegalStateException {
if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
throw new IllegalStateException("MediaCodecVideoDecoder previously operated on "
+ mediaCodecThread + " but is now called on " + Thread.currentThread());
}
}
// Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
private boolean initDecode(
VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodecThread != null) {
throw new RuntimeException("initDecode: Forgot to release()?");
}
String mime = null;
useSurface = (surfaceTextureHelper != null);
String[] supportedCodecPrefixes = null;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
mime = VP8_MIME_TYPE;
supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
} else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
mime = VP9_MIME_TYPE;
supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
mime = H264_MIME_TYPE;
supportedCodecPrefixes = supportedH264HwCodecPrefixes;
} else {
throw new RuntimeException("initDecode: Non-supported codec " + type);
}
DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
if (properties == null) {
throw new RuntimeException("Cannot find HW decoder for " + type);
}
Logging.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height + ". Color: 0x"
+ Integer.toHexString(properties.colorFormat) + ". Use Surface: " + useSurface);
runningInstance = this; // Decoder is now running and can be queried for stack traces.
mediaCodecThread = Thread.currentThread();
try {
this.width = width;
this.height = height;
stride = width;
sliceHeight = height;
if (useSurface) {
textureListener = new TextureListener(surfaceTextureHelper);
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
}
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
if (!useSurface) {
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
}
Logging.d(TAG, " Format: " + format);
mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
if (mediaCodec == null) {
Logging.e(TAG, "Can not create media decoder");
return false;
}
mediaCodec.configure(format, surface, null, 0);
mediaCodec.start();
colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers();
inputBuffers = mediaCodec.getInputBuffers();
decodeStartTimeMs.clear();
hasDecodedFirstFrame = false;
dequeuedSurfaceOutputBuffers.clear();
droppedFrames = 0;
Logging.d(TAG,
"Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
return true;
} catch (IllegalStateException e) {
Logging.e(TAG, "initDecode failed", e);
return false;
}
}
// Resets the decoder so it can start decoding frames with new resolution.
// Flushes MediaCodec and clears decoder output buffers.
private void reset(int width, int height) {
if (mediaCodecThread == null || mediaCodec == null) {
throw new RuntimeException("Incorrect reset call for non-initialized decoder.");
}
Logging.d(TAG, "Java reset: " + width + " x " + height);
mediaCodec.flush();
this.width = width;
this.height = height;
decodeStartTimeMs.clear();
dequeuedSurfaceOutputBuffers.clear();
hasDecodedFirstFrame = false;
droppedFrames = 0;
}
private void release() {
Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
checkOnMediaCodecThread();
    // Run MediaCodec stop() and release() on a separate thread since
    // MediaCodec.stop() may sometimes hang.
final CountDownLatch releaseDone = new CountDownLatch(1);
Runnable runMediaCodecRelease = new Runnable() {
@Override
public void run() {
try {
Logging.d(TAG, "Java releaseDecoder on release thread");
mediaCodec.stop();
mediaCodec.release();
Logging.d(TAG, "Java releaseDecoder on release thread done");
} catch (Exception e) {
Logging.e(TAG, "Media decoder release failed", e);
}
releaseDone.countDown();
}
};
new Thread(runMediaCodecRelease).start();
if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
Logging.e(TAG, "Media decoder release timeout");
codecErrors++;
if (errorCallback != null) {
Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
}
}
mediaCodec = null;
mediaCodecThread = null;
runningInstance = null;
if (useSurface) {
surface.release();
surface = null;
textureListener.release();
}
Logging.d(TAG, "Java releaseDecoder done");
}
// Dequeue an input buffer and return its index, -1 if no input buffer is
// available, or -2 if the codec is no longer operative.
private int dequeueInputBuffer() {
checkOnMediaCodecThread();
try {
return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueIntputBuffer failed", e);
return -2;
}
}
  private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs,
      long timeStampMs, long ntpTimeStampMs) {
checkOnMediaCodecThread();
try {
inputBuffers[inputBufferIndex].position(0);
inputBuffers[inputBufferIndex].limit(size);
      decodeStartTimeMs.add(
          new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, ntpTimeStampMs));
      mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0);
return true;
} catch (IllegalStateException e) {
Logging.e(TAG, "decode failed", e);
return false;
}
}
private static class TimeStamps {
public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
this.decodeStartTimeMs = decodeStartTimeMs;
this.timeStampMs = timeStampMs;
this.ntpTimeStampMs = ntpTimeStampMs;
}
// Time when this frame was queued for decoding.
private final long decodeStartTimeMs;
// Only used for bookkeeping in Java. Stores C++ inputImage._timeStamp value for input frame.
private final long timeStampMs;
// Only used for bookkeeping in Java. Stores C++ inputImage.ntp_time_ms_ value for input frame.
private final long ntpTimeStampMs;
}
// Helper struct for dequeueOutputBuffer() below.
private static class DecodedOutputBuffer {
public DecodedOutputBuffer(int index, int offset, int size, long presentationTimeStampMs,
long timeStampMs, long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
this.index = index;
this.offset = offset;
this.size = size;
this.presentationTimeStampMs = presentationTimeStampMs;
this.timeStampMs = timeStampMs;
this.ntpTimeStampMs = ntpTimeStampMs;
this.decodeTimeMs = decodeTime;
this.endDecodeTimeMs = endDecodeTime;
}
private final int index;
private final int offset;
private final int size;
// Presentation timestamp returned in dequeueOutputBuffer call.
private final long presentationTimeStampMs;
// C++ inputImage._timeStamp value for output frame.
private final long timeStampMs;
// C++ inputImage.ntp_time_ms_ value for output frame.
private final long ntpTimeStampMs;
// Number of ms it took to decode this frame.
private final long decodeTimeMs;
// System time when this frame decoding finished.
private final long endDecodeTimeMs;
}
// Helper struct for dequeueTextureBuffer() below.
private static class DecodedTextureBuffer {
private final int textureID;
private final float[] transformMatrix;
// Presentation timestamp returned in dequeueOutputBuffer call.
private final long presentationTimeStampMs;
// C++ inputImage._timeStamp value for output frame.
private final long timeStampMs;
// C++ inputImage.ntp_time_ms_ value for output frame.
private final long ntpTimeStampMs;
// Number of ms it took to decode this frame.
private final long decodeTimeMs;
    // Interval from when the frame finished decoding until this buffer has been created.
    // Since there is only one texture, this interval depends on the time from when
    // a frame is decoded and provided to C++ until that frame is returned to the MediaCodec
    // so that the texture can be updated with the next decoded frame.
private final long frameDelayMs;
// A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
// that was dropped.
public DecodedTextureBuffer(int textureID, float[] transformMatrix,
long presentationTimeStampMs, long timeStampMs, long ntpTimeStampMs, long decodeTimeMs,
long frameDelay) {
this.textureID = textureID;
this.transformMatrix = transformMatrix;
this.presentationTimeStampMs = presentationTimeStampMs;
this.timeStampMs = timeStampMs;
this.ntpTimeStampMs = ntpTimeStampMs;
this.decodeTimeMs = decodeTimeMs;
this.frameDelayMs = frameDelay;
}
}
  // Poll-based texture listener.
private static class TextureListener
implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
private final SurfaceTextureHelper surfaceTextureHelper;
// |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
private final Object newFrameLock = new Object();
    // |bufferToRender| is non-null while waiting for the transition from addBufferToRender()
    // to onTextureFrameAvailable().
private DecodedOutputBuffer bufferToRender;
private DecodedTextureBuffer renderedBuffer;
public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
this.surfaceTextureHelper = surfaceTextureHelper;
surfaceTextureHelper.startListening(this);
}
public void addBufferToRender(DecodedOutputBuffer buffer) {
if (bufferToRender != null) {
Logging.e(TAG, "Unexpected addBufferToRender() called while waiting for a texture.");
throw new IllegalStateException("Waiting for a texture.");
}
bufferToRender = buffer;
}
public boolean isWaitingForTexture() {
synchronized (newFrameLock) {
return bufferToRender != null;
}
}
// Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
synchronized (newFrameLock) {
if (renderedBuffer != null) {
Logging.e(
TAG, "Unexpected onTextureFrameAvailable() called while already holding a texture.");
throw new IllegalStateException("Already holding a texture.");
}
// |timestampNs| is always zero on some Android versions.
renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
bufferToRender.presentationTimeStampMs, bufferToRender.timeStampMs,
bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
bufferToRender = null;
newFrameLock.notifyAll();
}
}
// Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
synchronized (newFrameLock) {
if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
try {
newFrameLock.wait(timeoutMs);
} catch (InterruptedException e) {
// Restore the interrupted status by reinterrupting the thread.
Thread.currentThread().interrupt();
}
}
DecodedTextureBuffer returnedBuffer = renderedBuffer;
renderedBuffer = null;
return returnedBuffer;
}
}
public void release() {
// SurfaceTextureHelper.stopListening() will block until any onTextureFrameAvailable() in
// progress is done. Therefore, the call must be outside any synchronized
// statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
surfaceTextureHelper.stopListening();
synchronized (newFrameLock) {
if (renderedBuffer != null) {
surfaceTextureHelper.returnTextureFrame();
renderedBuffer = null;
}
}
}
}
  // Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer.
  // Throws IllegalStateException if the call is made on the wrong thread, if the color format
  // changes to an unsupported format, or if |mediaCodec| is not in the Executing state. Throws
  // CodecException upon codec error.
private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
checkOnMediaCodecThread();
if (decodeStartTimeMs.isEmpty()) {
return null;
}
// Drain the decoder until receiving a decoded buffer or hitting
// MediaCodec.INFO_TRY_AGAIN_LATER.
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
final int result =
mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
switch (result) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = mediaCodec.getOutputBuffers();
Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
if (hasDecodedFirstFrame) {
throw new RuntimeException("Unexpected output buffer change event.");
}
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = mediaCodec.getOutputFormat();
Logging.d(TAG, "Decoder format changed: " + format.toString());
final int newWidth;
final int newHeight;
if (format.containsKey(FORMAT_KEY_CROP_LEFT) && format.containsKey(FORMAT_KEY_CROP_RIGHT)
&& format.containsKey(FORMAT_KEY_CROP_BOTTOM)
&& format.containsKey(FORMAT_KEY_CROP_TOP)) {
newWidth = 1 + format.getInteger(FORMAT_KEY_CROP_RIGHT)
- format.getInteger(FORMAT_KEY_CROP_LEFT);
newHeight = 1 + format.getInteger(FORMAT_KEY_CROP_BOTTOM)
- format.getInteger(FORMAT_KEY_CROP_TOP);
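            // Worked example: crop-left=0, crop-right=639, crop-top=0, crop-bottom=479
            // yields newWidth = 640 and newHeight = 480 (the crop bounds are inclusive).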
} else {
newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
}
if (hasDecodedFirstFrame && (newWidth != width || newHeight != height)) {
throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height
+ ". New " + newWidth + "*" + newHeight);
}
width = newWidth;
height = newHeight;
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
if (!supportedColorList.contains(colorFormat)) {
throw new IllegalStateException("Non supported color format: " + colorFormat);
}
}
if (format.containsKey(FORMAT_KEY_STRIDE)) {
stride = format.getInteger(FORMAT_KEY_STRIDE);
}
if (format.containsKey(FORMAT_KEY_SLICE_HEIGHT)) {
sliceHeight = format.getInteger(FORMAT_KEY_SLICE_HEIGHT);
}
Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
return null;
default:
hasDecodedFirstFrame = true;
TimeStamps timeStamps = decodeStartTimeMs.remove();
long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs;
if (decodeTimeMs > MAX_DECODE_TIME_MS) {
Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms"
+ ". Q size: " + decodeStartTimeMs.size()
+ ". Might be caused by resuming H264 decoding after a pause.");
decodeTimeMs = MAX_DECODE_TIME_MS;
}
return new DecodedOutputBuffer(result, info.offset, info.size,
TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs,
timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime());
}
}
}
  // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
  // Throws IllegalStateException if the call is made on the wrong thread, if the color format
  // changes to an unsupported format, or if |mediaCodec| is not in the Executing state. Throws
  // CodecException upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will
  // be dropped if a frame can't be returned.
private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
checkOnMediaCodecThread();
if (!useSurface) {
throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
}
DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
if (outputBuffer != null) {
dequeuedSurfaceOutputBuffers.add(outputBuffer);
}
    maybeRenderDecodedTextureBuffer();
    // Check if a texture is ready now by waiting at most |dequeueTimeoutMs|.
DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
if (renderedBuffer != null) {
      maybeRenderDecodedTextureBuffer();
return renderedBuffer;
}
if ((dequeuedSurfaceOutputBuffers.size()
>= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
|| (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
++droppedFrames;
      // Drop the oldest frame still in dequeuedSurfaceOutputBuffers. The frame already
      // handed to |textureListener| can't be dropped since mediaCodec.releaseOutputBuffer
      // has already been called for it.
final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
if (dequeueTimeoutMs > 0) {
// TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
// return the one and only texture even if it does not render.
Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
+ droppedFrame.presentationTimeStampMs + ". Total number of dropped frames: "
+ droppedFrames);
} else {
Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size()
+ ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs
+ ". Total number of dropped frames: " + droppedFrames);
}
mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
return new DecodedTextureBuffer(0, null, droppedFrame.presentationTimeStampMs,
droppedFrame.timeStampMs, droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
}
return null;
}
  private void maybeRenderDecodedTextureBuffer() {
if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) {
return;
}
// Get the first frame in the queue and render to the decoder output surface.
final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
textureListener.addBufferToRender(buffer);
mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
}
// Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
// non-surface decoding.
  // Throws IllegalStateException if the call is made on the wrong thread, if the codec is
  // configured for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
// MediaCodec.CodecException upon codec error.
private void returnDecodedOutputBuffer(int index)
throws IllegalStateException, MediaCodec.CodecException {
checkOnMediaCodecThread();
if (useSurface) {
throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
}
mediaCodec.releaseOutputBuffer(index, false /* render */);
}
}


@ -0,0 +1,927 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.graphics.Matrix;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
// This class is an implementation detail of the Java PeerConnection API.
@TargetApi(19)
@SuppressWarnings("deprecation")
public class MediaCodecVideoEncoder {
// This class is constructed, operated, and destroyed by its C++ incarnation,
// so the class and its methods have non-public visibility. The API this
// class exposes aims to mimic the webrtc::VideoEncoder API as closely as
// possible to minimize the amount of translation work necessary.
private static final String TAG = "MediaCodecVideoEncoder";
// Tracks webrtc::VideoCodecType.
public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
private static final int BITRATE_ADJUSTMENT_FPS = 30;
private static final int MAXIMUM_INITIAL_FPS = 30;
private static final double BITRATE_CORRECTION_SEC = 3.0;
// Maximum bitrate correction scale - no more than 4 times.
private static final double BITRATE_CORRECTION_MAX_SCALE = 4;
// Number of correction steps to reach the maximum correction scale.
private static final int BITRATE_CORRECTION_STEPS = 20;
// Forced key frame interval - used to reduce color distortions on the Qualcomm platform.
private static final long QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000;
private static final long QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
private static final long QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000;
// Active running encoder instance. Set in initEncode() (called from native code)
// and reset to null in release() call.
private static MediaCodecVideoEncoder runningInstance = null;
private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
private static int codecErrors = 0;
// List of disabled codec types - can be set from application.
private static Set<String> hwEncoderDisabledTypes = new HashSet<String>();
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] outputBuffers;
private EglBase14 eglBase;
private int profile;
private int width;
private int height;
private Surface inputSurface;
private GlRectDrawer drawer;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc";
private static final int VIDEO_AVCProfileHigh = 8;
private static final int VIDEO_AVCLevel3 = 0x100;
// Type of bitrate adjustment for video encoder.
public enum BitrateAdjustmentType {
// No adjustment - video encoder has no known bitrate problem.
NO_ADJUSTMENT,
  // Framerate-based bitrate adjustment is required - the HW encoder does not use frame
  // timestamps to calculate the frame bitrate budget and instead relies on the initial
  // fps configuration, assuming that all frames arrive at a fixed initial frame rate.
  FRAMERATE_ADJUSTMENT,
  // Dynamic bitrate adjustment is required - the HW encoder uses frame timestamps, but the
  // actual bitrate deviates too much from the target value.
  DYNAMIC_ADJUSTMENT
}
// Should be in sync with webrtc::H264::Profile.
public static enum H264Profile {
CONSTRAINED_BASELINE(0),
BASELINE(1),
MAIN(2),
CONSTRAINED_HIGH(3),
HIGH(4);
private final int value;
H264Profile(int value) {
this.value = value;
}
public int getValue() {
return value;
}
}
// Class describing supported media codec properties.
private static class MediaCodecProperties {
public final String codecPrefix;
// Minimum Android SDK required for this codec to be used.
public final int minSdk;
    // Set if the encoder implementation does not use frame timestamps to calculate the frame
    // bitrate budget and instead relies on the initial fps configuration, assuming that all
    // frames arrive at a fixed initial frame rate. Bitrate adjustment is required in this case.
public final BitrateAdjustmentType bitrateAdjustmentType;
MediaCodecProperties(
String codecPrefix, int minSdk, BitrateAdjustmentType bitrateAdjustmentType) {
this.codecPrefix = codecPrefix;
this.minSdk = minSdk;
this.bitrateAdjustmentType = bitrateAdjustmentType;
}
}
// List of supported HW VP8 encoders.
private static final MediaCodecProperties qcomVp8HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties exynosVp8HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.DYNAMIC_ADJUSTMENT);
private static final MediaCodecProperties intelVp8HwProperties = new MediaCodecProperties(
"OMX.Intel.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.NO_ADJUSTMENT);
private static MediaCodecProperties[] vp8HwList() {
    final ArrayList<MediaCodecProperties> supportedCodecs = new ArrayList<MediaCodecProperties>();
    supportedCodecs.add(qcomVp8HwProperties);
    supportedCodecs.add(exynosVp8HwProperties);
    if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTC-IntelVP8").equals("Enabled")) {
      supportedCodecs.add(intelVp8HwProperties);
    }
    return supportedCodecs.toArray(new MediaCodecProperties[supportedCodecs.size()]);
}
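  // The Intel VP8 encoder in vp8HwList() above is only included when the "WebRTC-IntelVP8"
  // field trial is set to "Enabled". A minimal sketch of how an application might enable it,
  // assuming field trials are passed through PeerConnectionFactory initialization:
  //
  //   PeerConnectionFactory.initializeFieldTrials("WebRTC-IntelVP8/Enabled/");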
// List of supported HW VP9 encoders.
private static final MediaCodecProperties qcomVp9HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.N, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties exynosVp9HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.N, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT);
private static final MediaCodecProperties[] vp9HwList =
new MediaCodecProperties[] {qcomVp9HwProperties, exynosVp9HwProperties};
// List of supported HW H.264 encoders.
private static final MediaCodecProperties qcomH264HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties exynosH264HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT);
private static final MediaCodecProperties[] h264HwList =
new MediaCodecProperties[] {qcomH264HwProperties, exynosH264HwProperties};
// List of supported HW H.264 high profile encoders.
private static final MediaCodecProperties exynosH264HighProfileHwProperties =
new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT);
private static final MediaCodecProperties[] h264HighProfileHwList =
new MediaCodecProperties[] {exynosH264HighProfileHwProperties};
  // List of devices with poor H.264 encoder quality.
  // The HW H.264 encoder on the devices below has poor bitrate control - the actual
  // bitrate deviates a lot from the target value.
private static final String[] H264_HW_EXCEPTION_MODELS =
new String[] {"SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4"};
// Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
// in OMX_Video.h
private static final int VIDEO_ControlRateConstant = 2;
  // NV12 color format supported by the QCOM codec but not declared in MediaCodec -
  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Allowable color formats supported by codec - in order of preference.
private static final int[] supportedColorList = {CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
private static final int[] supportedSurfaceColorList = {CodecCapabilities.COLOR_FormatSurface};
private VideoCodecType type;
private int colorFormat; // Used by native code.
// Variables used for dynamic bitrate adjustment.
private BitrateAdjustmentType bitrateAdjustmentType = BitrateAdjustmentType.NO_ADJUSTMENT;
private double bitrateAccumulator;
private double bitrateAccumulatorMax;
private double bitrateObservationTimeMs;
private int bitrateAdjustmentScaleExp;
private int targetBitrateBps;
private int targetFps;
  // Interval in ms to force key frame generation. Used to reduce the duration of color
  // distortions that sometimes happen when using the Qualcomm video encoder.
private long forcedKeyFrameMs;
private long lastKeyFrameMs;
// SPS and PPS NALs (Config frame) for H.264.
private ByteBuffer configData = null;
  // MediaCodec error handler - invoked when a critical error happens that may prevent
  // further use of the MediaCodec API. Currently this means that one of the MediaCodec
  // instances is hanging and can no longer be used.
public static interface MediaCodecVideoEncoderErrorCallback {
void onMediaCodecVideoEncoderCriticalError(int codecErrors);
}
public static void setErrorCallback(MediaCodecVideoEncoderErrorCallback errorCallback) {
Logging.d(TAG, "Set error callback");
MediaCodecVideoEncoder.errorCallback = errorCallback;
}
  // Functions to disable HW encoding - can be called from applications for platforms
  // that have known HW encoding problems.
public static void disableVp8HwCodec() {
Logging.w(TAG, "VP8 encoding is disabled by application.");
hwEncoderDisabledTypes.add(VP8_MIME_TYPE);
}
public static void disableVp9HwCodec() {
Logging.w(TAG, "VP9 encoding is disabled by application.");
hwEncoderDisabledTypes.add(VP9_MIME_TYPE);
}
public static void disableH264HwCodec() {
Logging.w(TAG, "H.264 encoding is disabled by application.");
hwEncoderDisabledTypes.add(H264_MIME_TYPE);
}
// Functions to query if HW encoding is supported.
public static boolean isVp8HwSupported() {
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
&& (findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedColorList) != null);
}
public static EncoderProperties vp8HwEncoderProperties() {
if (hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)) {
return null;
} else {
return findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedColorList);
}
}
public static boolean isVp9HwSupported() {
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
&& (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
}
public static boolean isH264HwSupported() {
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
&& (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
}
public static boolean isH264HighProfileHwSupported() {
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
&& (findHwEncoder(H264_MIME_TYPE, h264HighProfileHwList, supportedColorList) != null);
}
public static boolean isVp8HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
&& (findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedSurfaceColorList) != null);
}
public static boolean isVp9HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
&& (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
}
public static boolean isH264HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
&& (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
}
// Helper struct for findHwEncoder() below.
public static class EncoderProperties {
public EncoderProperties(
String codecName, int colorFormat, BitrateAdjustmentType bitrateAdjustmentType) {
this.codecName = codecName;
this.colorFormat = colorFormat;
this.bitrateAdjustmentType = bitrateAdjustmentType;
}
public final String codecName; // OpenMax component name for HW codec.
public final int colorFormat; // Color format supported by codec.
    public final BitrateAdjustmentType bitrateAdjustmentType; // Bitrate adjustment type.
}
private static EncoderProperties findHwEncoder(
String mime, MediaCodecProperties[] supportedHwCodecProperties, int[] colorList) {
    // MediaCodec.setParameters is missing for JB and below, so bitrate
    // cannot be adjusted dynamically.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null;
}
// Check if device is in H.264 exception list.
if (mime.equals(H264_MIME_TYPE)) {
List<String> exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
if (exceptionModels.contains(Build.MODEL)) {
Logging.w(TAG, "Model: " + Build.MODEL + " has black listed H.264 encoder.");
return null;
}
}
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = null;
try {
info = MediaCodecList.getCodecInfoAt(i);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve encoder codec info", e);
}
if (info == null || !info.isEncoder()) {
continue;
}
String name = null;
for (String mimeType : info.getSupportedTypes()) {
if (mimeType.equals(mime)) {
name = info.getName();
break;
}
}
if (name == null) {
continue; // No HW support in this codec; try the next one.
}
Logging.v(TAG, "Found candidate encoder " + name);
      // Check if this is a supported HW encoder.
boolean supportedCodec = false;
BitrateAdjustmentType bitrateAdjustmentType = BitrateAdjustmentType.NO_ADJUSTMENT;
for (MediaCodecProperties codecProperties : supportedHwCodecProperties) {
if (name.startsWith(codecProperties.codecPrefix)) {
if (Build.VERSION.SDK_INT < codecProperties.minSdk) {
Logging.w(
TAG, "Codec " + name + " is disabled due to SDK version " + Build.VERSION.SDK_INT);
continue;
}
if (codecProperties.bitrateAdjustmentType != BitrateAdjustmentType.NO_ADJUSTMENT) {
bitrateAdjustmentType = codecProperties.bitrateAdjustmentType;
Logging.w(
TAG, "Codec " + name + " requires bitrate adjustment: " + bitrateAdjustmentType);
}
supportedCodec = true;
break;
}
}
if (!supportedCodec) {
continue;
}
// Check if HW codec supports known color format.
CodecCapabilities capabilities;
try {
capabilities = info.getCapabilitiesForType(mime);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve encoder capabilities", e);
continue;
}
for (int colorFormat : capabilities.colorFormats) {
Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
for (int supportedColorFormat : colorList) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
// Found supported HW encoder.
Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name + ". Color: 0x"
+ Integer.toHexString(codecColorFormat) + ". Bitrate adjustment: "
+ bitrateAdjustmentType);
return new EncoderProperties(name, codecColorFormat, bitrateAdjustmentType);
}
}
}
}
return null; // No HW encoder.
}
private void checkOnMediaCodecThread() {
if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
throw new RuntimeException("MediaCodecVideoEncoder previously operated on " + mediaCodecThread
+ " but is now called on " + Thread.currentThread());
}
}
public static void printStackTrace() {
if (runningInstance != null && runningInstance.mediaCodecThread != null) {
StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
if (mediaCodecStackTraces.length > 0) {
Logging.d(TAG, "MediaCodecVideoEncoder stacks trace:");
for (StackTraceElement stackTrace : mediaCodecStackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
static MediaCodec createByCodecName(String codecName) {
try {
      // In the L SDK this call can throw IOException, so catch a generic
      // Exception in order to work in both cases.
return MediaCodec.createByCodecName(codecName);
} catch (Exception e) {
return null;
}
}
boolean initEncode(VideoCodecType type, int profile, int width, int height, int kbps, int fps,
EglBase14.Context sharedContext) {
final boolean useSurface = sharedContext != null;
Logging.d(TAG,
"Java initEncode: " + type + ". Profile: " + profile + " : " + width + " x " + height
+ ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
this.profile = profile;
this.width = width;
this.height = height;
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
EncoderProperties properties = null;
String mime = null;
int keyFrameIntervalSec = 0;
boolean configureH264HighProfile = false;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
mime = VP8_MIME_TYPE;
properties = findHwEncoder(
VP8_MIME_TYPE, vp8HwList(), useSurface ? supportedSurfaceColorList : supportedColorList);
keyFrameIntervalSec = 100;
} else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
mime = VP9_MIME_TYPE;
properties = findHwEncoder(
VP9_MIME_TYPE, vp9HwList, useSurface ? supportedSurfaceColorList : supportedColorList);
keyFrameIntervalSec = 100;
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
mime = H264_MIME_TYPE;
properties = findHwEncoder(
H264_MIME_TYPE, h264HwList, useSurface ? supportedSurfaceColorList : supportedColorList);
if (profile == H264Profile.CONSTRAINED_HIGH.getValue()) {
EncoderProperties h264HighProfileProperties = findHwEncoder(H264_MIME_TYPE,
h264HighProfileHwList, useSurface ? supportedSurfaceColorList : supportedColorList);
if (h264HighProfileProperties != null) {
Logging.d(TAG, "High profile H.264 encoder supported.");
configureH264HighProfile = true;
} else {
Logging.d(TAG, "High profile H.264 encoder requested, but not supported. Use baseline.");
}
}
keyFrameIntervalSec = 20;
}
if (properties == null) {
throw new RuntimeException("Can not find HW encoder for " + type);
}
runningInstance = this; // Encoder is now running and can be queried for stack traces.
colorFormat = properties.colorFormat;
bitrateAdjustmentType = properties.bitrateAdjustmentType;
if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT) {
fps = BITRATE_ADJUSTMENT_FPS;
} else {
fps = Math.min(fps, MAXIMUM_INITIAL_FPS);
}
forcedKeyFrameMs = 0;
lastKeyFrameMs = -1;
if (type == VideoCodecType.VIDEO_CODEC_VP8
&& properties.codecName.startsWith(qcomVp8HwProperties.codecPrefix)) {
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP
|| Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) {
forcedKeyFrameMs = QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
} else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
forcedKeyFrameMs = QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
} else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
forcedKeyFrameMs = QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
}
}
Logging.d(TAG, "Color format: " + colorFormat + ". Bitrate adjustment: " + bitrateAdjustmentType
+ ". Key frame interval: " + forcedKeyFrameMs + " . Initial fps: " + fps);
targetBitrateBps = 1000 * kbps;
targetFps = fps;
bitrateAccumulatorMax = targetBitrateBps / 8.0;
bitrateAccumulator = 0;
bitrateObservationTimeMs = 0;
bitrateAdjustmentScaleExp = 0;
mediaCodecThread = Thread.currentThread();
try {
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
format.setInteger(MediaFormat.KEY_BIT_RATE, targetBitrateBps);
format.setInteger("bitrate-mode", VIDEO_ControlRateConstant);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
format.setInteger(MediaFormat.KEY_FRAME_RATE, targetFps);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
if (configureH264HighProfile) {
format.setInteger("profile", VIDEO_AVCProfileHigh);
format.setInteger("level", VIDEO_AVCLevel3);
}
Logging.d(TAG, " Format: " + format);
mediaCodec = createByCodecName(properties.codecName);
this.type = type;
if (mediaCodec == null) {
Logging.e(TAG, "Can not create media encoder");
release();
return false;
}
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (useSurface) {
eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
// Create an input surface and keep a reference since we must release the surface when done.
inputSurface = mediaCodec.createInputSurface();
eglBase.createSurface(inputSurface);
drawer = new GlRectDrawer();
}
mediaCodec.start();
outputBuffers = mediaCodec.getOutputBuffers();
Logging.d(TAG, "Output buffers: " + outputBuffers.length);
} catch (IllegalStateException e) {
Logging.e(TAG, "initEncode failed", e);
release();
return false;
}
return true;
}
ByteBuffer[] getInputBuffers() {
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
Logging.d(TAG, "Input buffers: " + inputBuffers.length);
return inputBuffers;
}
void checkKeyFrameRequired(boolean requestedKeyFrame, long presentationTimestampUs) {
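    // Round the microsecond timestamp to the nearest millisecond: (us + 500) / 1000.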
long presentationTimestampMs = (presentationTimestampUs + 500) / 1000;
if (lastKeyFrameMs < 0) {
lastKeyFrameMs = presentationTimestampMs;
}
boolean forcedKeyFrame = false;
if (!requestedKeyFrame && forcedKeyFrameMs > 0
&& presentationTimestampMs > lastKeyFrameMs + forcedKeyFrameMs) {
forcedKeyFrame = true;
}
if (requestedKeyFrame || forcedKeyFrame) {
// Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
// indicate this in queueInputBuffer() below and guarantee _this_ frame
// be encoded as a key frame, but sadly that flag is ignored. Instead,
// we request a key frame "soon".
if (requestedKeyFrame) {
Logging.d(TAG, "Sync frame request");
} else {
Logging.d(TAG, "Sync frame forced");
}
Bundle b = new Bundle();
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
mediaCodec.setParameters(b);
lastKeyFrameMs = presentationTimestampMs;
}
}
boolean encodeBuffer(
boolean isKeyframe, int inputBuffer, int size, long presentationTimestampUs) {
checkOnMediaCodecThread();
try {
checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
mediaCodec.queueInputBuffer(inputBuffer, 0, size, presentationTimestampUs, 0);
return true;
} catch (IllegalStateException e) {
Logging.e(TAG, "encodeBuffer failed", e);
return false;
}
}
boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
long presentationTimestampUs) {
checkOnMediaCodecThread();
try {
checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
eglBase.makeCurrent();
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
// but it's a workaround for bug webrtc:5147.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
return true;
} catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e);
return false;
}
}
  /**
   * Encodes a new style VideoFrame. Called by JNI. |bufferIndex| is -1 when encoding in
   * surface mode, since an input buffer index is only used for byte buffer input.
   */
boolean encodeFrame(long nativeEncoder, boolean isKeyframe, VideoFrame frame, int bufferIndex) {
checkOnMediaCodecThread();
try {
long presentationTimestampUs = TimeUnit.NANOSECONDS.toMicros(frame.getTimestampNs());
checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
VideoFrame.Buffer buffer = frame.getBuffer();
if (buffer instanceof VideoFrame.TextureBuffer) {
VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer;
eglBase.makeCurrent();
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
// but it's a workaround for bug webrtc:5147.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
VideoFrameDrawer.drawTexture(drawer, textureBuffer, new Matrix() /* renderMatrix */, width,
height, 0 /* viewportX */, 0 /* viewportY */, width, height);
eglBase.swapBuffers(frame.getTimestampNs());
} else {
VideoFrame.I420Buffer i420Buffer = buffer.toI420();
nativeFillBuffer(nativeEncoder, bufferIndex, i420Buffer.getDataY(), i420Buffer.getStrideY(),
i420Buffer.getDataU(), i420Buffer.getStrideU(), i420Buffer.getDataV(),
i420Buffer.getStrideV());
i420Buffer.release();
// I420 consists of one full-resolution and two half-resolution planes.
// 1 + 1 / 4 + 1 / 4 = 3 / 2
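        // e.g. 640x480 -> 640 * 480 * 3 / 2 = 460800 bytes.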
int yuvSize = width * height * 3 / 2;
mediaCodec.queueInputBuffer(bufferIndex, 0, yuvSize, presentationTimestampUs, 0);
}
return true;
} catch (RuntimeException e) {
Logging.e(TAG, "encodeFrame failed", e);
return false;
}
}
void release() {
Logging.d(TAG, "Java releaseEncoder");
checkOnMediaCodecThread();
class CaughtException {
Exception e;
}
final CaughtException caughtException = new CaughtException();
boolean stopHung = false;
if (mediaCodec != null) {
      // Run MediaCodec stop() and release() on a separate thread since
      // MediaCodec.stop() may sometimes hang.
final CountDownLatch releaseDone = new CountDownLatch(1);
Runnable runMediaCodecRelease = new Runnable() {
@Override
public void run() {
Logging.d(TAG, "Java releaseEncoder on release thread");
try {
mediaCodec.stop();
} catch (Exception e) {
Logging.e(TAG, "Media encoder stop failed", e);
}
try {
mediaCodec.release();
} catch (Exception e) {
Logging.e(TAG, "Media encoder release failed", e);
caughtException.e = e;
}
Logging.d(TAG, "Java releaseEncoder on release thread done");
releaseDone.countDown();
}
};
new Thread(runMediaCodecRelease).start();
if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
Logging.e(TAG, "Media encoder release timeout");
stopHung = true;
}
mediaCodec = null;
}
mediaCodecThread = null;
if (drawer != null) {
drawer.release();
drawer = null;
}
if (eglBase != null) {
eglBase.release();
eglBase = null;
}
if (inputSurface != null) {
inputSurface.release();
inputSurface = null;
}
runningInstance = null;
if (stopHung) {
codecErrors++;
if (errorCallback != null) {
Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
errorCallback.onMediaCodecVideoEncoderCriticalError(codecErrors);
}
throw new RuntimeException("Media encoder release timeout.");
}
    // Re-throw any exception caught on the release thread as a runtime exception. Since it is
    // rethrown here, append the stack trace of the waiting thread as well.
if (caughtException.e != null) {
final RuntimeException runtimeException = new RuntimeException(caughtException.e);
runtimeException.setStackTrace(ThreadUtils.concatStackTraces(
caughtException.e.getStackTrace(), runtimeException.getStackTrace()));
throw runtimeException;
}
Logging.d(TAG, "Java releaseEncoder done");
}
private boolean setRates(int kbps, int frameRate) {
checkOnMediaCodecThread();
int codecBitrateBps = 1000 * kbps;
if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
bitrateAccumulatorMax = codecBitrateBps / 8.0;
if (targetBitrateBps > 0 && codecBitrateBps < targetBitrateBps) {
        // Rescale the accumulator level if the accumulator max decreases.
bitrateAccumulator = bitrateAccumulator * codecBitrateBps / targetBitrateBps;
}
}
targetBitrateBps = codecBitrateBps;
targetFps = frameRate;
// Adjust actual encoder bitrate based on bitrate adjustment type.
if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT && targetFps > 0) {
codecBitrateBps = BITRATE_ADJUSTMENT_FPS * targetBitrateBps / targetFps;
Logging.v(TAG,
"setRates: " + kbps + " -> " + (codecBitrateBps / 1000) + " kbps. Fps: " + targetFps);
} else if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps + ". ExpScale: "
+ bitrateAdjustmentScaleExp);
if (bitrateAdjustmentScaleExp != 0) {
codecBitrateBps = (int) (codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp));
}
} else {
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps);
}
try {
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, codecBitrateBps);
mediaCodec.setParameters(params);
return true;
} catch (IllegalStateException e) {
Logging.e(TAG, "setRates failed", e);
return false;
}
}
// Dequeue an input buffer and return its index, -1 if no input buffer is
// available, or -2 if the codec is no longer operative.
int dequeueInputBuffer() {
checkOnMediaCodecThread();
try {
return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueIntputBuffer failed", e);
return -2;
}
}
// Helper struct for dequeueOutputBuffer() below.
static class OutputBufferInfo {
public OutputBufferInfo(
int index, ByteBuffer buffer, boolean isKeyFrame, long presentationTimestampUs) {
this.index = index;
this.buffer = buffer;
this.isKeyFrame = isKeyFrame;
this.presentationTimestampUs = presentationTimestampUs;
}
public final int index;
public final ByteBuffer buffer;
public final boolean isKeyFrame;
public final long presentationTimestampUs;
}
// Dequeue and return an output buffer, or null if no output is ready. Return
// a fake OutputBufferInfo with index -1 if the codec is no longer operable.
OutputBufferInfo dequeueOutputBuffer() {
checkOnMediaCodecThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
      // Check if this is a config frame and save the configuration data.
if (result >= 0) {
boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
if (isConfigFrame) {
Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
configData = ByteBuffer.allocateDirect(info.size);
outputBuffers[result].position(info.offset);
outputBuffers[result].limit(info.offset + info.size);
configData.put(outputBuffers[result]);
          // Log a few SPS header bytes to check profile and level.
String spsData = "";
for (int i = 0; i < (info.size < 8 ? info.size : 8); i++) {
spsData += Integer.toHexString(configData.get(i) & 0xff) + " ";
}
Logging.d(TAG, spsData);
// Release buffer back.
mediaCodec.releaseOutputBuffer(result, false);
// Query next output.
result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
}
}
if (result >= 0) {
// MediaCodec doesn't care about Buffer position/remaining/etc so we can
// mess with them to get a slice and avoid having to pass extra
// (BufferInfo-related) parameters back to C++.
ByteBuffer outputBuffer = outputBuffers[result].duplicate();
outputBuffer.position(info.offset);
outputBuffer.limit(info.offset + info.size);
reportEncodedFrame(info.size);
// Check key frame flag.
boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (isKeyFrame) {
Logging.d(TAG, "Sync frame generated");
}
if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
Logging.d(TAG, "Appending config frame of size " + configData.capacity()
+ " to output buffer with offset " + info.offset + ", size " + info.size);
          // For an H.264 key frame, prepend the SPS and PPS NALs.
ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size);
configData.rewind();
keyFrameBuffer.put(configData);
keyFrameBuffer.put(outputBuffer);
keyFrameBuffer.position(0);
return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs);
} else {
return new OutputBufferInfo(
result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
}
} else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mediaCodec.getOutputBuffers();
return dequeueOutputBuffer();
} else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
return dequeueOutputBuffer();
} else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
return null;
}
throw new RuntimeException("dequeueOutputBuffer: " + result);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueOutputBuffer failed", e);
return new OutputBufferInfo(-1, null, false, -1);
}
}
private double getBitrateScale(int bitrateAdjustmentScaleExp) {
return Math.pow(BITRATE_CORRECTION_MAX_SCALE,
(double) bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
}
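  // Worked example of getBitrateScale() above (derived from the constants in this file):
  // with BITRATE_CORRECTION_MAX_SCALE = 4 and BITRATE_CORRECTION_STEPS = 20, the scale is
  // 4^(exp / 20), so exp = 20 -> 4.0x, exp = 10 -> 2.0x and exp = -20 -> 0.25x.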
private void reportEncodedFrame(int size) {
if (targetFps == 0 || bitrateAdjustmentType != BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
return;
}
    // Accumulate the difference between actual and expected frame sizes.
double expectedBytesPerFrame = targetBitrateBps / (8.0 * targetFps);
bitrateAccumulator += (size - expectedBytesPerFrame);
bitrateObservationTimeMs += 1000.0 / targetFps;
// Put a cap on the accumulator, i.e., don't let it grow beyond some level to avoid
// using too old data for bitrate adjustment.
double bitrateAccumulatorCap = BITRATE_CORRECTION_SEC * bitrateAccumulatorMax;
bitrateAccumulator = Math.min(bitrateAccumulator, bitrateAccumulatorCap);
bitrateAccumulator = Math.max(bitrateAccumulator, -bitrateAccumulatorCap);
    // Do bitrate adjustment every 3 seconds if the actual encoder bitrate deviates too much
    // from the target value.
if (bitrateObservationTimeMs > 1000 * BITRATE_CORRECTION_SEC) {
Logging.d(TAG, "Acc: " + (int) bitrateAccumulator + ". Max: " + (int) bitrateAccumulatorMax
+ ". ExpScale: " + bitrateAdjustmentScaleExp);
boolean bitrateAdjustmentScaleChanged = false;
if (bitrateAccumulator > bitrateAccumulatorMax) {
// Encoder generates too high bitrate - need to reduce the scale.
int bitrateAdjustmentInc = (int) (bitrateAccumulator / bitrateAccumulatorMax + 0.5);
bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
bitrateAccumulator = bitrateAccumulatorMax;
bitrateAdjustmentScaleChanged = true;
} else if (bitrateAccumulator < -bitrateAccumulatorMax) {
// Encoder generates too low bitrate - need to increase the scale.
int bitrateAdjustmentInc = (int) (-bitrateAccumulator / bitrateAccumulatorMax + 0.5);
bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
bitrateAccumulator = -bitrateAccumulatorMax;
bitrateAdjustmentScaleChanged = true;
}
if (bitrateAdjustmentScaleChanged) {
bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_CORRECTION_STEPS);
bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_CORRECTION_STEPS);
Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp + ". Value: "
+ getBitrateScale(bitrateAdjustmentScaleExp));
setRates(targetBitrateBps / 1000, targetFps);
}
bitrateObservationTimeMs = 0;
}
}
// Release a dequeued output buffer back to the codec for re-use. Return
// false if the codec is no longer operable.
boolean releaseOutputBuffer(int index) {
checkOnMediaCodecThread();
try {
mediaCodec.releaseOutputBuffer(index, false);
return true;
} catch (IllegalStateException e) {
Logging.e(TAG, "releaseOutputBuffer failed", e);
return false;
}
}
/** Fills an inputBuffer with the given index with data from the byte buffers. */
private static native void nativeFillBuffer(long nativeEncoder, int inputBuffer, ByteBuffer dataY,
int strideY, ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV);
}


@ -0,0 +1,84 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.LinkedList;
import java.util.List;
/**
* Description of media constraints for {@code MediaStream} and
* {@code PeerConnection}.
*/
public class MediaConstraints {
/** Simple String key/value pair. */
public static class KeyValuePair {
private final String key;
private final String value;
public KeyValuePair(String key, String value) {
this.key = key;
this.value = value;
}
public String getKey() {
return key;
}
public String getValue() {
return value;
}
public String toString() {
return key + ": " + value;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
KeyValuePair that = (KeyValuePair) other;
return key.equals(that.key) && value.equals(that.value);
}
@Override
public int hashCode() {
return key.hashCode() + value.hashCode();
}
}
public final List<KeyValuePair> mandatory;
public final List<KeyValuePair> optional;
public MediaConstraints() {
mandatory = new LinkedList<KeyValuePair>();
optional = new LinkedList<KeyValuePair>();
}
private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
StringBuilder builder = new StringBuilder("[");
for (KeyValuePair pair : list) {
if (builder.length() > 1) {
builder.append(", ");
}
builder.append(pair.toString());
}
return builder.append("]").toString();
}
public String toString() {
return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
+ stringifyKeyValuePairList(optional);
}
}
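
A minimal construction sketch; the constraint keys shown ("OfferToReceiveAudio", "OfferToReceiveVideo", "DtlsSrtpKeyAgreement") are conventional names assumed here, not constants defined in this class:

static MediaConstraints sdpConstraintsSketch() {
  MediaConstraints constraints = new MediaConstraints();
  constraints.mandatory.add(
      new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
  constraints.mandatory.add(
      new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
  constraints.optional.add(
      new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
  return constraints; // typically passed to createOffer()/createAnswer()
}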


@ -0,0 +1,33 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ MediaSourceInterface. */
public class MediaSource {
/** Tracks MediaSourceInterface.SourceState */
public enum State { INITIALIZING, LIVE, ENDED, MUTED }
final long nativeSource; // Package-protected for PeerConnectionFactory.
public MediaSource(long nativeSource) {
this.nativeSource = nativeSource;
}
public State state() {
return nativeState(nativeSource);
}
public void dispose() {
JniCommon.nativeReleaseRef(nativeSource);
}
private static native State nativeState(long pointer);
}


@ -0,0 +1,106 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.LinkedList;
/** Java wrapper for a C++ MediaStreamInterface. */
public class MediaStream {
public final LinkedList<AudioTrack> audioTracks;
public final LinkedList<VideoTrack> videoTracks;
public final LinkedList<VideoTrack> preservedVideoTracks;
// Package-protected for PeerConnection.
final long nativeStream;
public MediaStream(long nativeStream) {
audioTracks = new LinkedList<AudioTrack>();
videoTracks = new LinkedList<VideoTrack>();
preservedVideoTracks = new LinkedList<VideoTrack>();
this.nativeStream = nativeStream;
}
public boolean addTrack(AudioTrack track) {
if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) {
audioTracks.add(track);
return true;
}
return false;
}
public boolean addTrack(VideoTrack track) {
if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
videoTracks.add(track);
return true;
}
return false;
}
  // Tracks added via addTrack() are automatically released once MediaStream.dispose()
  // is called. If a video track needs to be preserved after the MediaStream is
  // destroyed, it should be added with addPreservedTrack() instead.
public boolean addPreservedTrack(VideoTrack track) {
if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
preservedVideoTracks.add(track);
return true;
}
return false;
}
public boolean removeTrack(AudioTrack track) {
audioTracks.remove(track);
return nativeRemoveAudioTrack(nativeStream, track.nativeTrack);
}
public boolean removeTrack(VideoTrack track) {
videoTracks.remove(track);
preservedVideoTracks.remove(track);
return nativeRemoveVideoTrack(nativeStream, track.nativeTrack);
}
public void dispose() {
// Remove and release previously added audio and video tracks.
while (!audioTracks.isEmpty()) {
AudioTrack track = audioTracks.getFirst();
removeTrack(track);
track.dispose();
}
while (!videoTracks.isEmpty()) {
VideoTrack track = videoTracks.getFirst();
removeTrack(track);
track.dispose();
}
// Remove, but do not release preserved video tracks.
while (!preservedVideoTracks.isEmpty()) {
removeTrack(preservedVideoTracks.getFirst());
}
free(nativeStream);
}
public String label() {
return nativeLabel(nativeStream);
}
public String toString() {
return "[" + label() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]";
}
private static native boolean nativeAddAudioTrack(long nativeStream, long nativeAudioTrack);
private static native boolean nativeAddVideoTrack(long nativeStream, long nativeVideoTrack);
private static native boolean nativeRemoveAudioTrack(long nativeStream, long nativeAudioTrack);
private static native boolean nativeRemoveVideoTrack(long nativeStream, long nativeVideoTrack);
private static native String nativeLabel(long nativeStream);
private static native void free(long nativeStream);
}
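
A lifecycle sketch, assuming an already-created PeerConnectionFactory whose createLocalMediaStream, createAudioSource and createAudioTrack methods (defined elsewhere in this package) behave as their names suggest:

static void streamLifecycleSketch(PeerConnectionFactory factory) {
  MediaStream stream = factory.createLocalMediaStream("stream1");
  AudioSource source = factory.createAudioSource(new MediaConstraints());
  AudioTrack track = factory.createAudioTrack("audio1", source);
  stream.addTrack(track);
  // ... hand the stream to a PeerConnection via addStream(stream) ...
  stream.dispose(); // removes and releases the tracks added above
  source.dispose();
}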


@ -0,0 +1,62 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ MediaStreamTrackInterface. */
public class MediaStreamTrack {
/** Tracks MediaStreamTrackInterface.TrackState */
public enum State { LIVE, ENDED }
public enum MediaType {
MEDIA_TYPE_AUDIO,
MEDIA_TYPE_VIDEO,
}
final long nativeTrack;
public MediaStreamTrack(long nativeTrack) {
this.nativeTrack = nativeTrack;
}
public String id() {
return nativeId(nativeTrack);
}
public String kind() {
return nativeKind(nativeTrack);
}
public boolean enabled() {
return nativeEnabled(nativeTrack);
}
public boolean setEnabled(boolean enable) {
return nativeSetEnabled(nativeTrack, enable);
}
public State state() {
return nativeState(nativeTrack);
}
public void dispose() {
JniCommon.nativeReleaseRef(nativeTrack);
}
private static native String nativeId(long nativeTrack);
private static native String nativeKind(long nativeTrack);
private static native boolean nativeEnabled(long nativeTrack);
private static native boolean nativeSetEnabled(long nativeTrack, boolean enabled);
private static native State nativeState(long nativeTrack);
}


@ -0,0 +1,79 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.HashMap;
import java.util.Map;
// Java-side of androidmetrics_jni.cc.
//
// RTC histograms can be queried through the API getAndReset().
// The returned map holds the name of a histogram and its samples.
//
// Example of |map| with one histogram:
// |name|: "WebRTC.Video.InputFramesPerSecond"
// |min|: 1
// |max|: 100
// |bucketCount|: 50
// |samples|: [30]:1
//
// Most histograms are not updated frequently (e.g. most video metrics are an
// average over the call and recorded when a stream is removed).
// The metrics can for example be retrieved when a peer connection is closed.
public class Metrics {
private static final String TAG = "Metrics";
static {
System.loadLibrary("jingle_peerconnection_so");
}
public final Map<String, HistogramInfo> map =
new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
/**
* Class holding histogram information.
*/
public static class HistogramInfo {
public final int min;
public final int max;
public final int bucketCount;
public final Map<Integer, Integer> samples =
new HashMap<Integer, Integer>(); // <value, # of events>
public HistogramInfo(int min, int max, int bucketCount) {
this.min = min;
this.max = max;
this.bucketCount = bucketCount;
}
public void addSample(int value, int numEvents) {
samples.put(value, numEvents);
}
}
private void add(String name, HistogramInfo info) {
map.put(name, info);
}
// Enables gathering of metrics (which can be fetched with getAndReset()).
// Must be called before PeerConnectionFactory is created.
public static void enable() {
nativeEnable();
}
// Gets and clears native histograms.
public static Metrics getAndReset() {
return nativeGetAndReset();
}
private static native void nativeEnable();
private static native Metrics nativeGetAndReset();
}
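
A usage sketch of the API described above:

static void metricsSketch() {
  Metrics.enable(); // must run before the PeerConnectionFactory is created
  // ... run a call ...
  Metrics metrics = Metrics.getAndReset();
  for (java.util.Map.Entry<String, Metrics.HistogramInfo> entry : metrics.map.entrySet()) {
    Logging.d("MetricsSketch", entry.getKey() + " samples: " + entry.getValue().samples);
  }
}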


@ -0,0 +1,238 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
import static org.webrtc.NetworkMonitorAutoDetect.NetworkInformation;
import android.annotation.SuppressLint;
import android.content.Context;
import android.os.Build;
import java.util.ArrayList;
import java.util.List;
/**
* Borrowed from Chromium's src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java
*
* Triggers updates to the underlying network state from OS networking events.
*
* WARNING: This class is not thread-safe.
*/
public class NetworkMonitor {
/**
* Alerted when the connection type of the network changes.
* The alert is fired on the UI thread.
*/
public interface NetworkObserver {
public void onConnectionTypeChanged(ConnectionType connectionType);
}
private static final String TAG = "NetworkMonitor";
  // Keeping a static reference is okay here because we only store the application context.
private static NetworkMonitor instance;
// Native observers of the connection type changes.
private final ArrayList<Long> nativeNetworkObservers;
// Java observers of the connection type changes.
private final ArrayList<NetworkObserver> networkObservers;
// Object that detects the connection type changes.
private NetworkMonitorAutoDetect autoDetector;
private ConnectionType currentConnectionType = ConnectionType.CONNECTION_UNKNOWN;
private NetworkMonitor() {
nativeNetworkObservers = new ArrayList<Long>();
networkObservers = new ArrayList<NetworkObserver>();
}
// TODO(sakal): Remove once downstream dependencies have been updated.
@Deprecated
public static void init(Context context) {}
/**
* Returns the singleton instance.
*/
public static NetworkMonitor getInstance() {
if (instance == null) {
instance = new NetworkMonitor();
}
return instance;
}
private static void assertIsTrue(boolean condition) {
if (!condition) {
throw new AssertionError("Expected to be true");
}
}
/**
* Called by the native code.
*
* Enables auto detection of the current network state based on notifications
* from the system. Note that this requires the embedding app have the
* platform ACCESS_NETWORK_STATE permission.
*/
private void startMonitoring(long nativeObserver) {
Logging.d(TAG, "Start monitoring from native observer " + nativeObserver);
nativeNetworkObservers.add(nativeObserver);
if (autoDetector == null) {
createAutoDetector();
}
// The observers expect a network list update after they call startMonitoring.
final NetworkMonitorAutoDetect.NetworkState networkState =
autoDetector.getCurrentNetworkState();
updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
updateObserverActiveNetworkList(nativeObserver);
}
// Called by the native code.
private void stopMonitoring(long nativeObserver) {
Logging.d(TAG, "Stop monitoring from native observer " + nativeObserver);
nativeNetworkObservers.remove(nativeObserver);
if (nativeNetworkObservers.isEmpty()) {
autoDetector.destroy();
autoDetector = null;
}
}
// Called by the native code to determine if network binding is supported
// on this platform.
private boolean networkBindingSupported() {
return autoDetector != null && autoDetector.supportNetworkCallback();
}
// Called by the native code to get the Android SDK version.
private static int androidSdkInt() {
return Build.VERSION.SDK_INT;
}
private ConnectionType getCurrentConnectionType() {
return currentConnectionType;
}
private long getCurrentDefaultNetId() {
return autoDetector == null ? INVALID_NET_ID : autoDetector.getDefaultNetId();
}
private void createAutoDetector() {
autoDetector = new NetworkMonitorAutoDetect(new NetworkMonitorAutoDetect.Observer() {
@Override
public void onConnectionTypeChanged(ConnectionType newConnectionType) {
updateCurrentConnectionType(newConnectionType);
}
@Override
public void onNetworkConnect(NetworkInformation networkInfo) {
notifyObserversOfNetworkConnect(networkInfo);
}
@Override
public void onNetworkDisconnect(long networkHandle) {
notifyObserversOfNetworkDisconnect(networkHandle);
}
}, ContextUtils.getApplicationContext());
}
private void updateCurrentConnectionType(ConnectionType newConnectionType) {
currentConnectionType = newConnectionType;
notifyObserversOfConnectionTypeChange(newConnectionType);
}
/**
* Alerts all observers of a connection change.
*/
private void notifyObserversOfConnectionTypeChange(ConnectionType newConnectionType) {
for (long nativeObserver : nativeNetworkObservers) {
nativeNotifyConnectionTypeChanged(nativeObserver);
}
for (NetworkObserver observer : networkObservers) {
observer.onConnectionTypeChanged(newConnectionType);
}
}
private void notifyObserversOfNetworkConnect(NetworkInformation networkInfo) {
for (long nativeObserver : nativeNetworkObservers) {
nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
}
}
private void notifyObserversOfNetworkDisconnect(long networkHandle) {
for (long nativeObserver : nativeNetworkObservers) {
nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
}
}
private void updateObserverActiveNetworkList(long nativeObserver) {
List<NetworkInformation> networkInfoList = autoDetector.getActiveNetworkList();
if (networkInfoList == null || networkInfoList.size() == 0) {
return;
}
NetworkInformation[] networkInfos = new NetworkInformation[networkInfoList.size()];
networkInfos = networkInfoList.toArray(networkInfos);
nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
}
/**
* Adds an observer for any connection type changes.
*/
public static void addNetworkObserver(NetworkObserver observer) {
getInstance().addNetworkObserverInternal(observer);
}
private void addNetworkObserverInternal(NetworkObserver observer) {
networkObservers.add(observer);
}
/**
* Removes an observer for any connection type changes.
*/
public static void removeNetworkObserver(NetworkObserver observer) {
getInstance().removeNetworkObserverInternal(observer);
}
private void removeNetworkObserverInternal(NetworkObserver observer) {
networkObservers.remove(observer);
}
/**
* Checks if there currently is connectivity.
*/
public static boolean isOnline() {
ConnectionType connectionType = getInstance().getCurrentConnectionType();
return connectionType != ConnectionType.CONNECTION_NONE;
}
private native void nativeNotifyConnectionTypeChanged(long nativePtr);
private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
private native void nativeNotifyOfNetworkDisconnect(long nativePtr, long networkHandle);
private native void nativeNotifyOfActiveNetworkList(
long nativePtr, NetworkInformation[] networkInfos);
// For testing only.
static void resetInstanceForTests() {
instance = new NetworkMonitor();
}
// For testing only.
static void createAutoDetectorForTest() {
getInstance().createAutoDetector();
}
// For testing only.
static NetworkMonitorAutoDetect getAutoDetectorForTest() {
return getInstance().autoDetector;
}
}
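
A usage sketch of the observer API:

static void networkObserverSketch() {
  NetworkMonitor.addNetworkObserver(new NetworkMonitor.NetworkObserver() {
    @Override
    public void onConnectionTypeChanged(
        NetworkMonitorAutoDetect.ConnectionType connectionType) {
      Logging.d("NetworkSketch", "Connection type changed: " + connectionType);
    }
  });
  Logging.d("NetworkSketch", "Online now: " + NetworkMonitor.isOnline());
}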


@ -0,0 +1,740 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.SuppressLint;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.ConnectivityManager;
import android.net.ConnectivityManager.NetworkCallback;
import android.net.LinkAddress;
import android.net.LinkProperties;
import android.net.Network;
import android.net.NetworkCapabilities;
import android.net.NetworkInfo;
import android.net.NetworkRequest;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.net.wifi.p2p.WifiP2pGroup;
import android.net.wifi.p2p.WifiP2pManager;
import android.os.Build;
import android.telephony.TelephonyManager;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Borrowed from Chromium's
* src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
*
* Used by the NetworkMonitor to listen to platform changes in connectivity.
* Note that use of this class requires that the app have the platform
* ACCESS_NETWORK_STATE permission.
*/
public class NetworkMonitorAutoDetect extends BroadcastReceiver {
public static enum ConnectionType {
CONNECTION_UNKNOWN,
CONNECTION_ETHERNET,
CONNECTION_WIFI,
CONNECTION_4G,
CONNECTION_3G,
CONNECTION_2G,
CONNECTION_UNKNOWN_CELLULAR,
CONNECTION_BLUETOOTH,
CONNECTION_NONE
}
public static class IPAddress {
public final byte[] address;
public IPAddress(byte[] address) {
this.address = address;
}
}
/** Java version of NetworkMonitor.NetworkInformation */
public static class NetworkInformation {
public final String name;
public final ConnectionType type;
public final long handle;
public final IPAddress[] ipAddresses;
public NetworkInformation(
String name, ConnectionType type, long handle, IPAddress[] addresses) {
this.name = name;
this.type = type;
this.handle = handle;
this.ipAddresses = addresses;
}
};
static class NetworkState {
private final boolean connected;
// Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
// further divided into 2G, 3G, or 4G from the subtype.
private final int type;
// Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs.
// Will be useful to find the maximum bandwidth.
private final int subtype;
public NetworkState(boolean connected, int type, int subtype) {
this.connected = connected;
this.type = type;
this.subtype = subtype;
}
public boolean isConnected() {
return connected;
}
public int getNetworkType() {
return type;
}
public int getNetworkSubType() {
return subtype;
}
}
/**
* The methods in this class get called when the network changes if the callback
* is registered with a proper network request. It is only available in Android Lollipop
* and above.
*/
@SuppressLint("NewApi")
private class SimpleNetworkCallback extends NetworkCallback {
@Override
public void onAvailable(Network network) {
Logging.d(TAG, "Network becomes available: " + network.toString());
onNetworkChanged(network);
}
@Override
public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) {
// A capabilities change may indicate the ConnectionType has changed,
// so forward the new NetworkInformation along to the observer.
Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
onNetworkChanged(network);
}
@Override
public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) {
      // A link property change may indicate that the IP addresses have changed,
      // so forward the new NetworkInformation to the observer.
Logging.d(TAG, "link properties changed: " + linkProperties.toString());
onNetworkChanged(network);
}
@Override
public void onLosing(Network network, int maxMsToLive) {
      // Called when the network is about to be lost within maxMsToLive milliseconds.
      // We may use this signal later.
      Logging.d(
          TAG, "Network " + network.toString() + " is about to be lost in " + maxMsToLive + "ms");
}
@Override
public void onLost(Network network) {
Logging.d(TAG, "Network " + network.toString() + " is disconnected");
observer.onNetworkDisconnect(networkToNetId(network));
}
private void onNetworkChanged(Network network) {
NetworkInformation networkInformation = connectivityManagerDelegate.networkToInfo(network);
if (networkInformation != null) {
observer.onNetworkConnect(networkInformation);
}
}
}
/** Queries the ConnectivityManager for information about the current connection. */
static class ConnectivityManagerDelegate {
/**
* Note: In some rare Android systems connectivityManager is null. We handle that
* gracefully below.
*/
private final ConnectivityManager connectivityManager;
ConnectivityManagerDelegate(Context context) {
connectivityManager =
(ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
}
// For testing.
ConnectivityManagerDelegate() {
// All the methods below should be overridden.
connectivityManager = null;
}
/**
* Returns connection type and status information about the current
* default network.
*/
NetworkState getNetworkState() {
if (connectivityManager == null) {
return new NetworkState(false, -1, -1);
}
return getNetworkState(connectivityManager.getActiveNetworkInfo());
}
/**
* Returns connection type and status information about |network|.
* Only callable on Lollipop and newer releases.
*/
@SuppressLint("NewApi")
NetworkState getNetworkState(Network network) {
if (connectivityManager == null) {
return new NetworkState(false, -1, -1);
}
return getNetworkState(connectivityManager.getNetworkInfo(network));
}
/**
* Returns connection type and status information gleaned from networkInfo.
*/
NetworkState getNetworkState(NetworkInfo networkInfo) {
if (networkInfo == null || !networkInfo.isConnected()) {
return new NetworkState(false, -1, -1);
}
return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype());
}
/**
* Returns all connected networks.
* Only callable on Lollipop and newer releases.
*/
@SuppressLint("NewApi")
Network[] getAllNetworks() {
if (connectivityManager == null) {
return new Network[0];
}
return connectivityManager.getAllNetworks();
}
List<NetworkInformation> getActiveNetworkList() {
if (!supportNetworkCallback()) {
return null;
}
ArrayList<NetworkInformation> netInfoList = new ArrayList<NetworkInformation>();
for (Network network : getAllNetworks()) {
NetworkInformation info = networkToInfo(network);
if (info != null) {
netInfoList.add(info);
}
}
return netInfoList;
}
/**
* Returns the NetID of the current default network. Returns
     * INVALID_NET_ID if no default network is currently connected.
* Only callable on Lollipop and newer releases.
*/
@SuppressLint("NewApi")
long getDefaultNetId() {
if (!supportNetworkCallback()) {
return INVALID_NET_ID;
}
// Android Lollipop had no API to get the default network; only an
// API to return the NetworkInfo for the default network. To
// determine the default network one can find the network with
// type matching that of the default network.
final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo();
if (defaultNetworkInfo == null) {
return INVALID_NET_ID;
}
final Network[] networks = getAllNetworks();
long defaultNetId = INVALID_NET_ID;
for (Network network : networks) {
if (!hasInternetCapability(network)) {
continue;
}
final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) {
// There should not be multiple connected networks of the
// same type. At least as of Android Marshmallow this is
// not supported. If this becomes supported this assertion
// may trigger. At that point we could consider using
// ConnectivityManager.getDefaultNetwork() though this
// may give confusing results with VPNs and is only
// available with Android Marshmallow.
if (defaultNetId != INVALID_NET_ID) {
throw new RuntimeException(
"Multiple connected networks of same type are not supported.");
}
defaultNetId = networkToNetId(network);
}
}
return defaultNetId;
}
@SuppressLint("NewApi")
private NetworkInformation networkToInfo(Network network) {
LinkProperties linkProperties = connectivityManager.getLinkProperties(network);
// getLinkProperties will return null if the network is unknown.
if (linkProperties == null) {
Logging.w(TAG, "Detected unknown network: " + network.toString());
return null;
}
if (linkProperties.getInterfaceName() == null) {
Logging.w(TAG, "Null interface name for network " + network.toString());
return null;
}
NetworkState networkState = getNetworkState(network);
if (networkState.connected && networkState.getNetworkType() == ConnectivityManager.TYPE_VPN) {
// If a VPN network is in place, we can find the underlying network type via querying the
// active network info thanks to
// https://android.googlesource.com/platform/frameworks/base/+/d6a7980d
networkState = getNetworkState();
}
ConnectionType connectionType = getConnectionType(networkState);
if (connectionType == ConnectionType.CONNECTION_NONE) {
// This may not be an error. The OS may signal a network event with connection type
// NONE when the network disconnects.
Logging.d(TAG, "Network " + network.toString() + " is disconnected");
return null;
}
      // Some Android devices may return a CONNECTION_UNKNOWN_CELLULAR or CONNECTION_UNKNOWN type,
      // which appears to be usable. Just log it here.
if (connectionType == ConnectionType.CONNECTION_UNKNOWN
|| connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
+ " because it has type " + networkState.getNetworkType() + " and subtype "
+ networkState.getNetworkSubType());
}
NetworkInformation networkInformation =
new NetworkInformation(linkProperties.getInterfaceName(), connectionType,
networkToNetId(network), getIPAddresses(linkProperties));
return networkInformation;
}
/**
* Returns true if {@code network} can provide Internet access. Can be used to
* ignore specialized networks (e.g. IMS, FOTA).
*/
@SuppressLint("NewApi")
boolean hasInternetCapability(Network network) {
if (connectivityManager == null) {
return false;
}
final NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network);
return capabilities != null
&& capabilities.hasCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET);
}
/** Only callable on Lollipop and newer releases. */
@SuppressLint("NewApi")
public void registerNetworkCallback(NetworkCallback networkCallback) {
connectivityManager.registerNetworkCallback(
new NetworkRequest.Builder()
.addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
.build(),
networkCallback);
}
/** Only callable on Lollipop and newer releases. */
@SuppressLint("NewApi")
public void requestMobileNetwork(NetworkCallback networkCallback) {
NetworkRequest.Builder builder = new NetworkRequest.Builder();
builder.addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
.addTransportType(NetworkCapabilities.TRANSPORT_CELLULAR);
connectivityManager.requestNetwork(builder.build(), networkCallback);
}
@SuppressLint("NewApi")
IPAddress[] getIPAddresses(LinkProperties linkProperties) {
IPAddress[] ipAddresses = new IPAddress[linkProperties.getLinkAddresses().size()];
int i = 0;
for (LinkAddress linkAddress : linkProperties.getLinkAddresses()) {
ipAddresses[i] = new IPAddress(linkAddress.getAddress().getAddress());
++i;
}
return ipAddresses;
}
@SuppressLint("NewApi")
public void releaseCallback(NetworkCallback networkCallback) {
if (supportNetworkCallback()) {
Logging.d(TAG, "Unregister network callback");
connectivityManager.unregisterNetworkCallback(networkCallback);
}
}
public boolean supportNetworkCallback() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && connectivityManager != null;
}
}
/** Queries the WifiManager for SSID of the current Wifi connection. */
static class WifiManagerDelegate {
private final Context context;
WifiManagerDelegate(Context context) {
this.context = context;
}
// For testing.
WifiManagerDelegate() {
// All the methods below should be overridden.
context = null;
}
String getWifiSSID() {
final Intent intent = context.registerReceiver(
null, new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
if (intent != null) {
final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
if (wifiInfo != null) {
final String ssid = wifiInfo.getSSID();
if (ssid != null) {
return ssid;
}
}
}
return "";
}
}
/** Maintains the information about wifi direct (aka WifiP2p) networks. */
static class WifiDirectManagerDelegate extends BroadcastReceiver {
// Network "handle" for the Wifi P2p network. We have to bind to the default network id
// (NETWORK_UNSPECIFIED) for these addresses.
private static final int WIFI_P2P_NETWORK_HANDLE = 0;
private final Context context;
private final Observer observer;
// Network information about a WifiP2p (aka WiFi-Direct) network, or null if no such network is
// connected.
private NetworkInformation wifiP2pNetworkInfo = null;
WifiDirectManagerDelegate(Observer observer, Context context) {
this.context = context;
this.observer = observer;
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction(WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION);
intentFilter.addAction(WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION);
context.registerReceiver(this, intentFilter);
}
// BroadcastReceiver
@Override
@SuppressLint("InlinedApi")
public void onReceive(Context context, Intent intent) {
if (WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION.equals(intent.getAction())) {
WifiP2pGroup wifiP2pGroup = intent.getParcelableExtra(WifiP2pManager.EXTRA_WIFI_P2P_GROUP);
onWifiP2pGroupChange(wifiP2pGroup);
} else if (WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION.equals(intent.getAction())) {
int state = intent.getIntExtra(WifiP2pManager.EXTRA_WIFI_STATE, 0 /* default to unknown */);
onWifiP2pStateChange(state);
}
}
/** Releases the broadcast receiver. */
public void release() {
context.unregisterReceiver(this);
}
public List<NetworkInformation> getActiveNetworkList() {
if (wifiP2pNetworkInfo != null) {
return Collections.singletonList(wifiP2pNetworkInfo);
}
return Collections.emptyList();
}
/** Handle a change notification about the wifi p2p group. */
private void onWifiP2pGroupChange(WifiP2pGroup wifiP2pGroup) {
if (wifiP2pGroup == null || wifiP2pGroup.getInterface() == null) {
return;
}
NetworkInterface wifiP2pInterface;
try {
wifiP2pInterface = NetworkInterface.getByName(wifiP2pGroup.getInterface());
} catch (SocketException e) {
Logging.e(TAG, "Unable to get WifiP2p network interface", e);
return;
}
List<InetAddress> interfaceAddresses = Collections.list(wifiP2pInterface.getInetAddresses());
IPAddress[] ipAddresses = new IPAddress[interfaceAddresses.size()];
for (int i = 0; i < interfaceAddresses.size(); ++i) {
ipAddresses[i] = new IPAddress(interfaceAddresses.get(i).getAddress());
}
wifiP2pNetworkInfo =
new NetworkInformation(
wifiP2pGroup.getInterface(),
ConnectionType.CONNECTION_WIFI,
WIFI_P2P_NETWORK_HANDLE,
ipAddresses);
observer.onNetworkConnect(wifiP2pNetworkInfo);
}
/** Handle a state change notification about wifi p2p. */
private void onWifiP2pStateChange(int state) {
if (state == WifiP2pManager.WIFI_P2P_STATE_DISABLED) {
wifiP2pNetworkInfo = null;
observer.onNetworkDisconnect(WIFI_P2P_NETWORK_HANDLE);
}
}
}
static final long INVALID_NET_ID = -1;
private static final String TAG = "NetworkMonitorAutoDetect";
// Observer for the connection type change.
private final Observer observer;
private final IntentFilter intentFilter;
private final Context context;
  // Used to request a mobile network. It does nothing except keep the
  // callback so the request can be released later.
private final NetworkCallback mobileNetworkCallback;
// Used to receive updates on all networks.
private final NetworkCallback allNetworkCallback;
// connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing.
private ConnectivityManagerDelegate connectivityManagerDelegate;
private WifiManagerDelegate wifiManagerDelegate;
private WifiDirectManagerDelegate wifiDirectManagerDelegate;
private boolean isRegistered;
private ConnectionType connectionType;
private String wifiSSID;
/**
* Observer interface by which observer is notified of network changes.
*/
public static interface Observer {
/**
* Called when default network changes.
*/
public void onConnectionTypeChanged(ConnectionType newConnectionType);
public void onNetworkConnect(NetworkInformation networkInfo);
public void onNetworkDisconnect(long networkHandle);
}
/**
* Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread.
*/
@SuppressLint("NewApi")
public NetworkMonitorAutoDetect(Observer observer, Context context) {
this.observer = observer;
this.context = context;
connectivityManagerDelegate = new ConnectivityManagerDelegate(context);
wifiManagerDelegate = new WifiManagerDelegate(context);
final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
connectionType = getConnectionType(networkState);
wifiSSID = getWifiSSID(networkState);
intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
if (PeerConnectionFactory.fieldTrialsFindFullName("IncludeWifiDirect").equals("Enabled")) {
wifiDirectManagerDelegate = new WifiDirectManagerDelegate(observer, context);
}
registerReceiver();
if (connectivityManagerDelegate.supportNetworkCallback()) {
      // On Android 6.0.0 the WRITE_SETTINGS permission is necessary for
      // requestNetwork, so the call below may fail without it. This was fixed in
      // Android 6.0.1.
NetworkCallback tempNetworkCallback = new NetworkCallback();
try {
connectivityManagerDelegate.requestMobileNetwork(tempNetworkCallback);
} catch (java.lang.SecurityException e) {
Logging.w(TAG, "Unable to obtain permission to request a cellular network.");
tempNetworkCallback = null;
}
mobileNetworkCallback = tempNetworkCallback;
allNetworkCallback = new SimpleNetworkCallback();
connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback);
} else {
mobileNetworkCallback = null;
allNetworkCallback = null;
}
}
public boolean supportNetworkCallback() {
return connectivityManagerDelegate.supportNetworkCallback();
}
/**
* Allows overriding the ConnectivityManagerDelegate for tests.
*/
void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) {
connectivityManagerDelegate = delegate;
}
/**
* Allows overriding the WifiManagerDelegate for tests.
*/
void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) {
wifiManagerDelegate = delegate;
}
/**
* Returns whether the object has registered to receive network connectivity intents.
* Visible for testing.
*/
boolean isReceiverRegisteredForTesting() {
return isRegistered;
}
List<NetworkInformation> getActiveNetworkList() {
List<NetworkInformation> connectivityManagerList =
connectivityManagerDelegate.getActiveNetworkList();
if (connectivityManagerList == null) {
return null;
}
ArrayList<NetworkInformation> result =
new ArrayList<NetworkInformation>(connectivityManagerList);
if (wifiDirectManagerDelegate != null) {
result.addAll(wifiDirectManagerDelegate.getActiveNetworkList());
}
return result;
}
public void destroy() {
if (allNetworkCallback != null) {
connectivityManagerDelegate.releaseCallback(allNetworkCallback);
}
if (mobileNetworkCallback != null) {
connectivityManagerDelegate.releaseCallback(mobileNetworkCallback);
}
if (wifiDirectManagerDelegate != null) {
wifiDirectManagerDelegate.release();
}
unregisterReceiver();
}
/**
* Registers a BroadcastReceiver in the given context.
*/
private void registerReceiver() {
if (isRegistered)
return;
isRegistered = true;
context.registerReceiver(this, intentFilter);
}
/**
* Unregisters the BroadcastReceiver in the given context.
*/
private void unregisterReceiver() {
if (!isRegistered)
return;
isRegistered = false;
context.unregisterReceiver(this);
}
public NetworkState getCurrentNetworkState() {
return connectivityManagerDelegate.getNetworkState();
}
/**
   * Returns the NetID of the device's current default network used for
   * communication.
   * Only implemented on Lollipop and newer releases; returns INVALID_NET_ID
   * when not implemented.
*/
public long getDefaultNetId() {
return connectivityManagerDelegate.getDefaultNetId();
}
public static ConnectionType getConnectionType(NetworkState networkState) {
if (!networkState.isConnected()) {
return ConnectionType.CONNECTION_NONE;
}
switch (networkState.getNetworkType()) {
case ConnectivityManager.TYPE_ETHERNET:
return ConnectionType.CONNECTION_ETHERNET;
case ConnectivityManager.TYPE_WIFI:
return ConnectionType.CONNECTION_WIFI;
case ConnectivityManager.TYPE_WIMAX:
return ConnectionType.CONNECTION_4G;
case ConnectivityManager.TYPE_BLUETOOTH:
return ConnectionType.CONNECTION_BLUETOOTH;
case ConnectivityManager.TYPE_MOBILE:
// Use information from TelephonyManager to classify the connection.
switch (networkState.getNetworkSubType()) {
case TelephonyManager.NETWORK_TYPE_GPRS:
case TelephonyManager.NETWORK_TYPE_EDGE:
case TelephonyManager.NETWORK_TYPE_CDMA:
case TelephonyManager.NETWORK_TYPE_1xRTT:
case TelephonyManager.NETWORK_TYPE_IDEN:
return ConnectionType.CONNECTION_2G;
case TelephonyManager.NETWORK_TYPE_UMTS:
case TelephonyManager.NETWORK_TYPE_EVDO_0:
case TelephonyManager.NETWORK_TYPE_EVDO_A:
case TelephonyManager.NETWORK_TYPE_HSDPA:
case TelephonyManager.NETWORK_TYPE_HSUPA:
case TelephonyManager.NETWORK_TYPE_HSPA:
case TelephonyManager.NETWORK_TYPE_EVDO_B:
case TelephonyManager.NETWORK_TYPE_EHRPD:
case TelephonyManager.NETWORK_TYPE_HSPAP:
return ConnectionType.CONNECTION_3G;
case TelephonyManager.NETWORK_TYPE_LTE:
return ConnectionType.CONNECTION_4G;
default:
return ConnectionType.CONNECTION_UNKNOWN_CELLULAR;
}
default:
return ConnectionType.CONNECTION_UNKNOWN;
}
}
private String getWifiSSID(NetworkState networkState) {
if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI)
return "";
return wifiManagerDelegate.getWifiSSID();
}
// BroadcastReceiver
@Override
public void onReceive(Context context, Intent intent) {
final NetworkState networkState = getCurrentNetworkState();
if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) {
connectionTypeChanged(networkState);
}
}
private void connectionTypeChanged(NetworkState networkState) {
ConnectionType newConnectionType = getConnectionType(networkState);
String newWifiSSID = getWifiSSID(networkState);
if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID))
return;
connectionType = newConnectionType;
wifiSSID = newWifiSSID;
Logging.d(TAG, "Network connectivity changed, type is: " + connectionType);
observer.onConnectionTypeChanged(newConnectionType);
}
/**
   * Extracts the NetID of the network on Lollipop, and the NetworkHandle (which
   * is a munged NetID) on Marshmallow and newer releases. Only available on
   * Lollipop and newer releases. Returns long since getNetworkHandle returns long.
*/
@SuppressLint("NewApi")
private static long networkToNetId(Network network) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
return network.getNetworkHandle();
}
// NOTE(honghaiz): This depends on Android framework implementation details.
// These details cannot change because Lollipop has been released.
return Integer.parseInt(network.toString());
}
}
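
A construction sketch, assuming an application Context and the ACCESS_NETWORK_STATE permission:

static NetworkMonitorAutoDetect autoDetectSketch(android.content.Context appContext) {
  // Construct on the UI thread; call destroy() on the returned object to
  // unregister all callbacks and the broadcast receiver.
  return new NetworkMonitorAutoDetect(
      new NetworkMonitorAutoDetect.Observer() {
        @Override
        public void onConnectionTypeChanged(
            NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
          Logging.d("AutoDetectSketch", "Type: " + newConnectionType);
        }
        @Override
        public void onNetworkConnect(
            NetworkMonitorAutoDetect.NetworkInformation networkInfo) {}
        @Override
        public void onNetworkDisconnect(long networkHandle) {}
      },
      appContext);
}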


@ -0,0 +1,3 @@
per-file Camera*=sakal@webrtc.org
per-file Histogram.java=sakal@webrtc.org
per-file Metrics.java=sakal@webrtc.org


@ -0,0 +1,553 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
/**
* Java-land version of the PeerConnection APIs; wraps the C++ API
* http://www.webrtc.org/reference/native-apis, which in turn is inspired by the
* JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and
* http://www.w3.org/TR/mediacapture-streams/
*/
public class PeerConnection {
static {
System.loadLibrary("jingle_peerconnection_so");
}
/** Tracks PeerConnectionInterface::IceGatheringState */
public enum IceGatheringState { NEW, GATHERING, COMPLETE }
/** Tracks PeerConnectionInterface::IceConnectionState */
public enum IceConnectionState {
NEW,
CHECKING,
CONNECTED,
COMPLETED,
FAILED,
DISCONNECTED,
CLOSED
}
/** Tracks PeerConnectionInterface::TlsCertPolicy */
public enum TlsCertPolicy {
TLS_CERT_POLICY_SECURE,
TLS_CERT_POLICY_INSECURE_NO_CHECK,
}
/** Tracks PeerConnectionInterface::SignalingState */
public enum SignalingState {
STABLE,
HAVE_LOCAL_OFFER,
HAVE_LOCAL_PRANSWER,
HAVE_REMOTE_OFFER,
HAVE_REMOTE_PRANSWER,
CLOSED
}
/** Java version of PeerConnectionObserver. */
public static interface Observer {
/** Triggered when the SignalingState changes. */
public void onSignalingChange(SignalingState newState);
/** Triggered when the IceConnectionState changes. */
public void onIceConnectionChange(IceConnectionState newState);
/** Triggered when the ICE connection receiving status changes. */
public void onIceConnectionReceivingChange(boolean receiving);
/** Triggered when the IceGatheringState changes. */
public void onIceGatheringChange(IceGatheringState newState);
/** Triggered when a new ICE candidate has been found. */
public void onIceCandidate(IceCandidate candidate);
/** Triggered when some ICE candidates have been removed. */
public void onIceCandidatesRemoved(IceCandidate[] candidates);
/** Triggered when media is received on a new stream from remote peer. */
public void onAddStream(MediaStream stream);
    /** Triggered when a remote peer closes a stream. */
public void onRemoveStream(MediaStream stream);
/** Triggered when a remote peer opens a DataChannel. */
public void onDataChannel(DataChannel dataChannel);
/** Triggered when renegotiation is necessary. */
public void onRenegotiationNeeded();
/**
* Triggered when a new track is signaled by the remote peer, as a result of
* setRemoteDescription.
*/
public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams);
}
/** Java version of PeerConnectionInterface.IceServer. */
public static class IceServer {
// List of URIs associated with this server. Valid formats are described
// in RFC7064 and RFC7065, and more may be added in the future. The "host"
// part of the URI may contain either an IP address or a hostname.
@Deprecated public final String uri;
public final List<String> urls;
public final String username;
public final String password;
public final TlsCertPolicy tlsCertPolicy;
// If the URIs in |urls| only contain IP addresses, this field can be used
// to indicate the hostname, which may be necessary for TLS (using the SNI
// extension). If |urls| itself contains the hostname, this isn't
// necessary.
public final String hostname;
// List of protocols to be used in the TLS ALPN extension.
public final List<String> tlsAlpnProtocols;
// List of elliptic curves to be used in the TLS elliptic curves extension.
    // Only curve names supported by OpenSSL should be used (e.g. "P-256", "X25519").
public final List<String> tlsEllipticCurves;
/** Convenience constructor for STUN servers. */
@Deprecated
public IceServer(String uri) {
this(uri, "", "");
}
@Deprecated
public IceServer(String uri, String username, String password) {
this(uri, username, password, TlsCertPolicy.TLS_CERT_POLICY_SECURE);
}
@Deprecated
public IceServer(String uri, String username, String password, TlsCertPolicy tlsCertPolicy) {
this(uri, username, password, tlsCertPolicy, "");
}
@Deprecated
public IceServer(String uri, String username, String password, TlsCertPolicy tlsCertPolicy,
String hostname) {
this(uri, Collections.singletonList(uri), username, password, tlsCertPolicy, hostname, null,
null);
}
private IceServer(String uri, List<String> urls, String username, String password,
TlsCertPolicy tlsCertPolicy, String hostname, List<String> tlsAlpnProtocols,
List<String> tlsEllipticCurves) {
if (uri == null || urls == null || urls.isEmpty()) {
throw new IllegalArgumentException("uri == null || urls == null || urls.isEmpty()");
}
for (String it : urls) {
if (it == null) {
throw new IllegalArgumentException("urls element is null: " + urls);
}
}
if (username == null) {
throw new IllegalArgumentException("username == null");
}
if (password == null) {
throw new IllegalArgumentException("password == null");
}
if (hostname == null) {
throw new IllegalArgumentException("hostname == null");
}
this.uri = uri;
this.urls = urls;
this.username = username;
this.password = password;
this.tlsCertPolicy = tlsCertPolicy;
this.hostname = hostname;
this.tlsAlpnProtocols = tlsAlpnProtocols;
this.tlsEllipticCurves = tlsEllipticCurves;
}
public String toString() {
return urls + " [" + username + ":" + password + "] [" + tlsCertPolicy + "] [" + hostname
+ "] [" + tlsAlpnProtocols + "] [" + tlsEllipticCurves + "]";
}
public static Builder builder(String uri) {
return new Builder(Collections.singletonList(uri));
}
public static Builder builder(List<String> urls) {
return new Builder(urls);
}
public static class Builder {
private final List<String> urls;
private String username = "";
private String password = "";
private TlsCertPolicy tlsCertPolicy = TlsCertPolicy.TLS_CERT_POLICY_SECURE;
private String hostname = "";
private List<String> tlsAlpnProtocols;
private List<String> tlsEllipticCurves;
private Builder(List<String> urls) {
if (urls == null || urls.isEmpty()) {
throw new IllegalArgumentException("urls == null || urls.isEmpty(): " + urls);
}
this.urls = urls;
}
public Builder setUsername(String username) {
this.username = username;
return this;
}
public Builder setPassword(String password) {
this.password = password;
return this;
}
public Builder setTlsCertPolicy(TlsCertPolicy tlsCertPolicy) {
this.tlsCertPolicy = tlsCertPolicy;
return this;
}
public Builder setHostname(String hostname) {
this.hostname = hostname;
return this;
}
public Builder setTlsAlpnProtocols(List<String> tlsAlpnProtocols) {
this.tlsAlpnProtocols = tlsAlpnProtocols;
return this;
}
public Builder setTlsEllipticCurves(List<String> tlsEllipticCurves) {
this.tlsEllipticCurves = tlsEllipticCurves;
return this;
}
public IceServer createIceServer() {
return new IceServer(urls.get(0), urls, username, password, tlsCertPolicy, hostname,
tlsAlpnProtocols, tlsEllipticCurves);
}
}
}
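  // Usage sketch for the builder above (the URL and credentials are placeholders):
  //
  //   IceServer turnServer =
  //       IceServer.builder("turn:turn.example.org:3478")
  //           .setUsername("user")
  //           .setPassword("secret")
  //           .createIceServer();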
/** Java version of PeerConnectionInterface.IceTransportsType */
public enum IceTransportsType { NONE, RELAY, NOHOST, ALL }
/** Java version of PeerConnectionInterface.BundlePolicy */
public enum BundlePolicy { BALANCED, MAXBUNDLE, MAXCOMPAT }
/** Java version of PeerConnectionInterface.RtcpMuxPolicy */
public enum RtcpMuxPolicy { NEGOTIATE, REQUIRE }
/** Java version of PeerConnectionInterface.TcpCandidatePolicy */
public enum TcpCandidatePolicy { ENABLED, DISABLED }
/** Java version of PeerConnectionInterface.CandidateNetworkPolicy */
public enum CandidateNetworkPolicy { ALL, LOW_COST }
/** Java version of rtc::KeyType */
public enum KeyType { RSA, ECDSA }
/** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
public enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY }
/** Java version of rtc::IntervalRange */
public static class IntervalRange {
private final int min;
private final int max;
public IntervalRange(int min, int max) {
this.min = min;
this.max = max;
}
public int getMin() {
return min;
}
public int getMax() {
return max;
}
}
/** Java version of PeerConnectionInterface.RTCConfiguration */
public static class RTCConfiguration {
public IceTransportsType iceTransportsType;
public List<IceServer> iceServers;
public BundlePolicy bundlePolicy;
public RtcpMuxPolicy rtcpMuxPolicy;
public TcpCandidatePolicy tcpCandidatePolicy;
public CandidateNetworkPolicy candidateNetworkPolicy;
public int audioJitterBufferMaxPackets;
public boolean audioJitterBufferFastAccelerate;
public int iceConnectionReceivingTimeout;
public int iceBackupCandidatePairPingInterval;
public KeyType keyType;
public ContinualGatheringPolicy continualGatheringPolicy;
public int iceCandidatePoolSize;
public boolean pruneTurnPorts;
public boolean presumeWritableWhenFullyRelayed;
public Integer iceCheckMinInterval;
public boolean disableIPv6OnWifi;
// By default, PeerConnection will use a limited number of IPv6 network
// interfaces, in order to avoid too many ICE candidate pairs being created
// and delaying ICE completion.
//
// Can be set to Integer.MAX_VALUE to effectively disable the limit.
public int maxIPv6Networks;
public IntervalRange iceRegatherIntervalRange;
// TODO(deadbeef): Instead of duplicating the defaults here, we should do
// something to pick up the defaults from C++. The Objective-C equivalent
// of RTCConfiguration does that.
public RTCConfiguration(List<IceServer> iceServers) {
iceTransportsType = IceTransportsType.ALL;
bundlePolicy = BundlePolicy.BALANCED;
rtcpMuxPolicy = RtcpMuxPolicy.REQUIRE;
tcpCandidatePolicy = TcpCandidatePolicy.ENABLED;
      candidateNetworkPolicy = CandidateNetworkPolicy.ALL;
this.iceServers = iceServers;
audioJitterBufferMaxPackets = 50;
audioJitterBufferFastAccelerate = false;
iceConnectionReceivingTimeout = -1;
iceBackupCandidatePairPingInterval = -1;
keyType = KeyType.ECDSA;
continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
iceCandidatePoolSize = 0;
pruneTurnPorts = false;
presumeWritableWhenFullyRelayed = false;
iceCheckMinInterval = null;
disableIPv6OnWifi = false;
maxIPv6Networks = 5;
iceRegatherIntervalRange = null;
}
};
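  // Configuration sketch (placeholder STUN server; Collections and List are
  // imported at the top of this file):
  //
  //   List<IceServer> servers = Collections.singletonList(
  //       IceServer.builder("stun:stun.example.org:19302").createIceServer());
  //   RTCConfiguration config = new RTCConfiguration(servers);
  //   config.continualGatheringPolicy = ContinualGatheringPolicy.GATHER_CONTINUALLY;
  //   config.iceCandidatePoolSize = 1;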
private final List<MediaStream> localStreams;
private final long nativePeerConnection;
private final long nativeObserver;
private List<RtpSender> senders;
private List<RtpReceiver> receivers;
PeerConnection(long nativePeerConnection, long nativeObserver) {
this.nativePeerConnection = nativePeerConnection;
this.nativeObserver = nativeObserver;
localStreams = new LinkedList<MediaStream>();
senders = new LinkedList<RtpSender>();
receivers = new LinkedList<RtpReceiver>();
}
// JsepInterface.
public native SessionDescription getLocalDescription();
public native SessionDescription getRemoteDescription();
public native DataChannel createDataChannel(String label, DataChannel.Init init);
public native void createOffer(SdpObserver observer, MediaConstraints constraints);
public native void createAnswer(SdpObserver observer, MediaConstraints constraints);
public native void setLocalDescription(SdpObserver observer, SessionDescription sdp);
public native void setRemoteDescription(SdpObserver observer, SessionDescription sdp);
public boolean setConfiguration(RTCConfiguration config) {
return nativeSetConfiguration(config, nativeObserver);
}
public boolean addIceCandidate(IceCandidate candidate) {
return nativeAddIceCandidate(candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
}
public boolean removeIceCandidates(final IceCandidate[] candidates) {
return nativeRemoveIceCandidates(candidates);
}
public boolean addStream(MediaStream stream) {
boolean ret = nativeAddLocalStream(stream.nativeStream);
if (!ret) {
return false;
}
localStreams.add(stream);
return true;
}
public void removeStream(MediaStream stream) {
nativeRemoveLocalStream(stream.nativeStream);
localStreams.remove(stream);
}
/**
* Creates an RtpSender without a track.
* <p>
* This method allows an application to cause the PeerConnection to negotiate
* sending/receiving a specific media type, but without having a track to
* send yet.
* <p>
* When the application does want to begin sending a track, it can call
* RtpSender.setTrack, which doesn't require any additional SDP negotiation.
* <p>
* Example use:
* <pre>
* {@code
* audioSender = pc.createSender("audio", "stream1");
* videoSender = pc.createSender("video", "stream1");
* // Do normal SDP offer/answer, which will kick off ICE/DTLS and negotiate
* // media parameters....
* // Later, when the endpoint is ready to actually begin sending:
* audioSender.setTrack(audioTrack, false);
* videoSender.setTrack(videoTrack, false);
* }
* </pre>
* Note: This corresponds most closely to "addTransceiver" in the official
* WebRTC API, in that it creates a sender without a track. It was
* implemented before addTransceiver because it provides useful
* functionality, and properly implementing transceivers would have required
* a great deal more work.
*
* @param kind Corresponds to MediaStreamTrack kinds (must be "audio" or
* "video").
* @param stream_id The ID of the MediaStream that this sender's track will
* be associated with when SDP is applied to the remote
* PeerConnection. If createSender is used to create an
* audio and video sender that should be synchronized, they
* should use the same stream ID.
* @return A new RtpSender object if successful, or null otherwise.
*/
public RtpSender createSender(String kind, String stream_id) {
RtpSender new_sender = nativeCreateSender(kind, stream_id);
if (new_sender != null) {
senders.add(new_sender);
}
return new_sender;
}
// Note that calling getSenders will dispose of the senders previously
// returned (and same goes for getReceivers).
public List<RtpSender> getSenders() {
for (RtpSender sender : senders) {
sender.dispose();
}
senders = nativeGetSenders();
return Collections.unmodifiableList(senders);
}
public List<RtpReceiver> getReceivers() {
for (RtpReceiver receiver : receivers) {
receiver.dispose();
}
receivers = nativeGetReceivers();
return Collections.unmodifiableList(receivers);
}
// Older, non-standard implementation of getStats.
@Deprecated
public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
return nativeOldGetStats(observer, (track == null) ? 0 : track.nativeTrack);
}
// Gets stats using the new stats collection API, see webrtc/api/stats/. This
// will replace the old stats collection API once the new API has matured enough.
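// Example (a sketch; assumes Java 8+ lambdas, since RTCStatsCollectorCallback
// has a single method):
//   pc.getStats(report -> Logging.d("App", report.toString()));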
public void getStats(RTCStatsCollectorCallback callback) {
nativeNewGetStats(callback);
}
// Limits the bandwidth allocated for all RTP streams sent by this
// PeerConnection. Pass null to leave a value unchanged.
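// Example (illustrative values, not recommendations):
//   pc.setBitrate(100000 /* min */, null /* current, unchanged */, 2000000 /* max */);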
public native boolean setBitrate(Integer min, Integer current, Integer max);
// Starts recording an RTC event log. Ownership of the file is transferred to
// the native code. If an RTC event log is already being recorded, it will be
// stopped and a new one will start using the provided file. Logging will
// continue until the stopRtcEventLog function is called. The max_size_bytes
// argument is currently ignored; it is reserved for future use.
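// Example (a sketch; |logFile| is an assumed java.io.File, error handling omitted):
//   ParcelFileDescriptor pfd = ParcelFileDescriptor.open(logFile,
//       ParcelFileDescriptor.MODE_CREATE | ParcelFileDescriptor.MODE_WRITE_ONLY);
//   pc.startRtcEventLog(pfd.detachFd(), 10000000 /* max_size_bytes, currently ignored */);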
public boolean startRtcEventLog(int file_descriptor, int max_size_bytes) {
return nativeStartRtcEventLog(file_descriptor, max_size_bytes);
}
// Stops recording an RTC event log. If no RTC event log is currently being
// recorded, this call will have no effect.
public void stopRtcEventLog() {
nativeStopRtcEventLog();
}
// TODO(fischman): add support for DTMF-related methods once that API
// stabilizes.
public native SignalingState signalingState();
public native IceConnectionState iceConnectionState();
public native IceGatheringState iceGatheringState();
public native void close();
/**
* Free native resources associated with this PeerConnection instance.
* <p>
* This method removes a reference count from the C++ PeerConnection object,
* which should result in it being destroyed. It also calls equivalent
* "dispose" methods on the Java objects attached to this PeerConnection
* (streams, senders, receivers), such that their associated C++ objects
* will also be destroyed.
* <p>
* Note that this method cannot be safely called from an observer callback
* (PeerConnection.Observer, DataChannel.Observer, etc.). If you want to, for
* example, destroy the PeerConnection after an "ICE failed" callback, you
* must do this asynchronously (in other words, unwind the stack first). See
* <a href="https://bugs.chromium.org/p/webrtc/issues/detail?id=3721">bug
* 3721</a> for more details.
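* <p>
* For example (a sketch; {@code handler} is an assumed Handler bound to the
* thread that created this PeerConnection):
* <pre>
* {@code
* // From within an observer callback:
* handler.post(() -> pc.dispose());
* }
* </pre>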
*/
public void dispose() {
close();
for (MediaStream stream : localStreams) {
nativeRemoveLocalStream(stream.nativeStream);
stream.dispose();
}
localStreams.clear();
for (RtpSender sender : senders) {
sender.dispose();
}
senders.clear();
for (RtpReceiver receiver : receivers) {
receiver.dispose();
}
receivers.clear();
JniCommon.nativeReleaseRef(nativePeerConnection);
freeObserver(nativeObserver);
}
private static native void freeObserver(long nativeObserver);
public native boolean nativeSetConfiguration(RTCConfiguration config, long nativeObserver);
private native boolean nativeAddIceCandidate(
String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
private native boolean nativeRemoveIceCandidates(final IceCandidate[] candidates);
private native boolean nativeAddLocalStream(long nativeStream);
private native void nativeRemoveLocalStream(long nativeStream);
private native boolean nativeOldGetStats(StatsObserver observer, long nativeTrack);
private native void nativeNewGetStats(RTCStatsCollectorCallback callback);
private native RtpSender nativeCreateSender(String kind, String stream_id);
private native List<RtpSender> nativeGetSenders();
private native List<RtpReceiver> nativeGetReceivers();
private native boolean nativeStartRtcEventLog(int file_descriptor, int max_size_bytes);
private native void nativeStopRtcEventLog();
}

View File

@ -0,0 +1,293 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import java.util.List;
/**
* Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to
* the PeerConnection API for clients.
*/
public class PeerConnectionFactory {
private static volatile boolean nativeLibLoaded;
static {
try {
System.loadLibrary("jingle_peerconnection_so");
nativeLibLoaded = true;
} catch (UnsatisfiedLinkError t) {
nativeLibLoaded = false;
}
}
public static final String TRIAL_ENABLED = "Enabled";
public static final String VIDEO_FRAME_EMIT_TRIAL = "VideoFrameEmit";
private static final String TAG = "PeerConnectionFactory";
private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";
private final long nativeFactory;
private static Context applicationContext;
private static Thread networkThread;
private static Thread workerThread;
private static Thread signalingThread;
private EglBase localEglbase;
private EglBase remoteEglbase;
public static class Options {
// Keep in sync with webrtc/rtc_base/network.h!
static final int ADAPTER_TYPE_UNKNOWN = 0;
static final int ADAPTER_TYPE_ETHERNET = 1 << 0;
static final int ADAPTER_TYPE_WIFI = 1 << 1;
static final int ADAPTER_TYPE_CELLULAR = 1 << 2;
static final int ADAPTER_TYPE_VPN = 1 << 3;
static final int ADAPTER_TYPE_LOOPBACK = 1 << 4;
public int networkIgnoreMask;
public boolean disableEncryption;
public boolean disableNetworkMonitor;
}
// Must be called at least once before creating a PeerConnectionFactory
// (for example, at application startup time).
public static native void nativeInitializeAndroidGlobals(
Context context, boolean videoHwAcceleration);
public static void initializeAndroidGlobals(Context context, boolean videoHwAcceleration) {
ContextUtils.initialize(context);
nativeInitializeAndroidGlobals(context, videoHwAcceleration);
}
// Older signature of initializeAndroidGlobals. The extra parameters are now meaningless.
@Deprecated
public static boolean initializeAndroidGlobals(Object context, boolean initializeAudio,
boolean initializeVideo, boolean videoHwAcceleration) {
initializeAndroidGlobals((Context) context, videoHwAcceleration);
return true;
}
// Field trial initialization. Must be called before PeerConnectionFactory
// is created.
public static native void initializeFieldTrials(String fieldTrialsInitString);
// Wrapper of webrtc::field_trial::FindFullName. Use this to develop a feature with its default behavior off.
// Example usage:
// if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTCExperiment").equals("Enabled")) {
// method1();
// } else {
// method2();
// }
public static String fieldTrialsFindFullName(String name) {
return nativeLibLoaded ? nativeFieldTrialsFindFullName(name) : "";
}
private static native String nativeFieldTrialsFindFullName(String name);
// Internal tracing initialization. Must be called before PeerConnectionFactory is created to
// prevent racing with tracing code.
public static native void initializeInternalTracer();
// Internal tracing shutdown, called to prevent resource leaks. Must be called after
// PeerConnectionFactory is gone to prevent races with code performing tracing.
public static native void shutdownInternalTracer();
// Start/stop internal capturing of internal tracing.
public static native boolean startInternalTracingCapture(String tracing_filename);
public static native void stopInternalTracingCapture();
@Deprecated
public PeerConnectionFactory() {
this(null);
}
// Note: initializeAndroidGlobals must be called at least once before
// constructing a PeerConnectionFactory.
public PeerConnectionFactory(Options options) {
this(options, null /* encoderFactory */, null /* decoderFactory */);
}
public PeerConnectionFactory(
Options options, VideoEncoderFactory encoderFactory, VideoDecoderFactory decoderFactory) {
nativeFactory = nativeCreatePeerConnectionFactory(options, encoderFactory, decoderFactory);
if (nativeFactory == 0) {
throw new RuntimeException("Failed to initialize PeerConnectionFactory!");
}
}
public PeerConnection createPeerConnection(PeerConnection.RTCConfiguration rtcConfig,
MediaConstraints constraints, PeerConnection.Observer observer) {
long nativeObserver = nativeCreateObserver(observer);
if (nativeObserver == 0) {
return null;
}
long nativePeerConnection =
nativeCreatePeerConnection(nativeFactory, rtcConfig, constraints, nativeObserver);
if (nativePeerConnection == 0) {
return null;
}
return new PeerConnection(nativePeerConnection, nativeObserver);
}
public PeerConnection createPeerConnection(List<PeerConnection.IceServer> iceServers,
MediaConstraints constraints, PeerConnection.Observer observer) {
PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
return createPeerConnection(rtcConfig, constraints, observer);
}
public MediaStream createLocalMediaStream(String label) {
return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
}
public VideoSource createVideoSource(VideoCapturer capturer) {
final EglBase.Context eglContext =
localEglbase == null ? null : localEglbase.getEglBaseContext();
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create(VIDEO_CAPTURER_THREAD_NAME, eglContext);
long nativeAndroidVideoTrackSource =
nativeCreateVideoSource(nativeFactory, surfaceTextureHelper, capturer.isScreencast());
VideoCapturer.CapturerObserver capturerObserver =
new AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
capturer.initialize(
surfaceTextureHelper, ContextUtils.getApplicationContext(), capturerObserver);
return new VideoSource(nativeAndroidVideoTrackSource);
}
public VideoTrack createVideoTrack(String id, VideoSource source) {
return new VideoTrack(nativeCreateVideoTrack(nativeFactory, id, source.nativeSource));
}
public AudioSource createAudioSource(MediaConstraints constraints) {
return new AudioSource(nativeCreateAudioSource(nativeFactory, constraints));
}
public AudioTrack createAudioTrack(String id, AudioSource source) {
return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.nativeSource));
}
// Starts recording an AEC dump. Ownership of the file is transferred to the
// native code. If an AEC dump is already in progress, it will be stopped and
// a new one will start using the provided file.
public boolean startAecDump(int file_descriptor, int filesize_limit_bytes) {
return nativeStartAecDump(nativeFactory, file_descriptor, filesize_limit_bytes);
}
// Stops recording an AEC dump. If no AEC dump is currently being recorded,
// this call will have no effect.
public void stopAecDump() {
nativeStopAecDump(nativeFactory);
}
@Deprecated
public void setOptions(Options options) {
nativeSetOptions(nativeFactory, options);
}
/** Set the EGL context used by HW Video encoding and decoding.
*
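* <p>Example (a sketch; {@code rootEglBase} is an assumed application-wide EglBase):
* <pre>
* {@code
* factory.setVideoHwAccelerationOptions(
*     rootEglBase.getEglBaseContext(), rootEglBase.getEglBaseContext());
* }
* </pre>
*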
* @param localEglContext Must be the same as used by VideoCapturerAndroid and any local video
* renderer.
* @param remoteEglContext Must be the same as used by any remote video renderer.
*/
public void setVideoHwAccelerationOptions(
EglBase.Context localEglContext, EglBase.Context remoteEglContext) {
if (localEglbase != null) {
Logging.w(TAG, "Egl context already set.");
localEglbase.release();
}
if (remoteEglbase != null) {
Logging.w(TAG, "Egl context already set.");
remoteEglbase.release();
}
localEglbase = EglBase.create(localEglContext);
remoteEglbase = EglBase.create(remoteEglContext);
nativeSetVideoHwAccelerationOptions(
nativeFactory, localEglbase.getEglBaseContext(), remoteEglbase.getEglBaseContext());
}
public void dispose() {
nativeFreeFactory(nativeFactory);
networkThread = null;
workerThread = null;
signalingThread = null;
if (localEglbase != null)
localEglbase.release();
if (remoteEglbase != null)
remoteEglbase.release();
}
public void threadsCallbacks() {
nativeThreadsCallbacks(nativeFactory);
}
private static void printStackTrace(Thread thread, String threadName) {
if (thread != null) {
StackTraceElement[] stackTraces = thread.getStackTrace();
if (stackTraces.length > 0) {
Logging.d(TAG, threadName + " stack trace:");
for (StackTraceElement stackTrace : stackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
public static void printStackTraces() {
printStackTrace(networkThread, "Network thread");
printStackTrace(workerThread, "Worker thread");
printStackTrace(signalingThread, "Signaling thread");
}
private static void onNetworkThreadReady() {
networkThread = Thread.currentThread();
Logging.d(TAG, "onNetworkThreadReady");
}
private static void onWorkerThreadReady() {
workerThread = Thread.currentThread();
Logging.d(TAG, "onWorkerThreadReady");
}
private static void onSignalingThreadReady() {
signalingThread = Thread.currentThread();
Logging.d(TAG, "onSignalingThreadReady");
}
private static native long nativeCreatePeerConnectionFactory(
Options options, VideoEncoderFactory encoderFactory, VideoDecoderFactory decoderFactory);
private static native long nativeCreateObserver(PeerConnection.Observer observer);
private static native long nativeCreatePeerConnection(long nativeFactory,
PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver);
private static native long nativeCreateLocalMediaStream(long nativeFactory, String label);
private static native long nativeCreateVideoSource(
long nativeFactory, SurfaceTextureHelper surfaceTextureHelper, boolean is_screencast);
private static native long nativeCreateVideoTrack(
long nativeFactory, String id, long nativeVideoSource);
private static native long nativeCreateAudioSource(
long nativeFactory, MediaConstraints constraints);
private static native long nativeCreateAudioTrack(
long nativeFactory, String id, long nativeSource);
private static native boolean nativeStartAecDump(
long nativeFactory, int file_descriptor, int filesize_limit_bytes);
private static native void nativeStopAecDump(long nativeFactory);
@Deprecated public native void nativeSetOptions(long nativeFactory, Options options);
private static native void nativeSetVideoHwAccelerationOptions(
long nativeFactory, Object localEGLContext, Object remoteEGLContext);
private static native void nativeThreadsCallbacks(long nativeFactory);
private static native void nativeFreeFactory(long nativeFactory);
}

View File

@ -0,0 +1,105 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.Map;
/**
* Java version of webrtc::RTCStats. Represents an RTCStats object, as
* described in https://w3c.github.io/webrtc-stats/. The |id|, |timestampUs|
* and |type| accessors have the same meaning for this class as for the
* RTCStats dictionary. Each RTCStatsReport produced by getStats contains
* multiple RTCStats objects; one for each underlying object (codec, stream,
* transport, etc.) that was inspected to produce the stats.
*/
public class RTCStats {
private final long timestampUs;
private final String type;
private final String id;
private final Map<String, Object> members;
public RTCStats(long timestampUs, String type, String id, Map<String, Object> members) {
this.timestampUs = timestampUs;
this.type = type;
this.id = id;
this.members = members;
}
// Timestamp in microseconds.
public double getTimestampUs() {
return timestampUs;
}
// Equivalent to RTCStatsType in the stats spec. Indicates the type of the
// object that was inspected to produce the stats.
public String getType() {
return type;
}
// Unique ID representing this stats object. May be referred to by members of
// other stats objects.
public String getId() {
return id;
}
/**
* Returns map of member names to values. Returns as an ordered map so that
* the stats object can be serialized with a consistent ordering.
*
* Values will be one of the following objects:
* - Boolean
* - Integer (for 32-bit signed integers)
* - Long (for 32-bit unsigned and 64-bit signed integers)
* - BigInteger (for 64-bit unsigned integers)
* - Double
* - String
* - The array form of any of the above (e.g., Integer[])
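*
* For example (illustrative; the member name is an assumption about a typical
* stats object):
* <pre>
* {@code
* Object sent = stats.getMembers().get("bytesSent"); // A Long, per the list above.
* }
* </pre>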
*/
public Map<String, Object> getMembers() {
return members;
}
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("{ timestampUs: ")
.append(timestampUs)
.append(", type: ")
.append(type)
.append(", id: ")
.append(id);
for (Map.Entry<String, Object> entry : members.entrySet()) {
builder.append(", ").append(entry.getKey()).append(": ");
appendValue(builder, entry.getValue());
}
builder.append(" }");
return builder.toString();
}
private static void appendValue(StringBuilder builder, Object value) {
if (value instanceof Object[]) {
Object[] arrayValue = (Object[]) value;
builder.append('[');
for (int i = 0; i < arrayValue.length; ++i) {
if (i != 0) {
builder.append(", ");
}
appendValue(builder, arrayValue[i]);
}
builder.append(']');
} else if (value instanceof String) {
// Enclose strings in quotes to make it clear they're strings.
builder.append('"').append(value).append('"');
} else {
builder.append(value);
}
}
}

View File

@ -0,0 +1,17 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Interface for receiving stats reports (see webrtc::RTCStatsCollectorCallback). */
public interface RTCStatsCollectorCallback {
/** Called when the stats report is ready. */
public void onStatsDelivered(RTCStatsReport report);
}

View File

@ -0,0 +1,54 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.Map;
/**
* Java version of webrtc::RTCStatsReport. Each RTCStatsReport produced by
* getStats contains multiple RTCStats objects; one for each underlying object
* (codec, stream, transport, etc.) that was inspected to produce the stats.
*/
public class RTCStatsReport {
private final long timestampUs;
private final Map<String, RTCStats> stats;
public RTCStatsReport(long timestampUs, Map<String, RTCStats> stats) {
this.timestampUs = timestampUs;
this.stats = stats;
}
// Timestamp in microseconds.
public double getTimestampUs() {
return timestampUs;
}
// Map of stats object IDs to stats objects. Can be used to easily look up
// other stats objects, when they refer to each other by ID.
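// For example (illustrative; the member name is an assumption about a typical report):
//   RTCStats outboundRtp = report.getStatsMap().get(someOutboundRtpId);
//   String codecId = (String) outboundRtp.getMembers().get("codecId");
//   RTCStats codec = report.getStatsMap().get(codecId);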
public Map<String, RTCStats> getStatsMap() {
return stats;
}
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("{ timestampUs: ").append(timestampUs).append(", stats: [\n");
boolean first = true;
for (RTCStats stat : stats.values()) {
if (!first) {
builder.append(",\n");
}
builder.append(stat);
first = false;
}
builder.append(" ] }");
return builder.toString();
}
}

View File

@ -0,0 +1,290 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Point;
import android.opengl.Matrix;
import android.view.View;
/**
* Static helper functions for renderer implementations.
*/
public class RendererCommon {
/** Interface for reporting rendering events. */
public static interface RendererEvents {
/**
* Callback fired once first frame is rendered.
*/
public void onFirstFrameRendered();
/**
* Callback fired when rendered frame resolution or rotation has changed.
*/
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
}
/** Interface for rendering frames on an EGLSurface. */
public static interface GlDrawer {
/**
* Functions for drawing frames with different sources. The rendering surface target is
* implied by the current EGL context of the calling thread and requires no explicit argument.
* The coordinates specify the viewport location on the surface target.
*/
void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
int viewportY, int viewportWidth, int viewportHeight);
void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
/**
* Release all GL resources. This needs to be done manually, otherwise resources may leak.
*/
void release();
}
/**
* Helper class for determining layout size based on layout requirements, scaling type, and video
* aspect ratio.
*/
public static class VideoLayoutMeasure {
// The scaling type determines how the video will fill the allowed layout area in measure(). It
// can be specified separately for the case when video has matched orientation with layout size
// and when there is an orientation mismatch.
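// Typical use (a sketch): from a custom View's onMeasure(), e.g.
//   Point size = videoLayoutMeasure.measure(widthSpec, heightSpec, frameWidth, frameHeight);
//   setMeasuredDimension(size.x, size.y);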
private ScalingType scalingTypeMatchOrientation = ScalingType.SCALE_ASPECT_BALANCED;
private ScalingType scalingTypeMismatchOrientation = ScalingType.SCALE_ASPECT_BALANCED;
public void setScalingType(ScalingType scalingType) {
this.scalingTypeMatchOrientation = scalingType;
this.scalingTypeMismatchOrientation = scalingType;
}
public void setScalingType(
ScalingType scalingTypeMatchOrientation, ScalingType scalingTypeMismatchOrientation) {
this.scalingTypeMatchOrientation = scalingTypeMatchOrientation;
this.scalingTypeMismatchOrientation = scalingTypeMismatchOrientation;
}
public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
// Calculate max allowed layout size.
final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec);
final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec);
if (frameWidth == 0 || frameHeight == 0 || maxWidth == 0 || maxHeight == 0) {
return new Point(maxWidth, maxHeight);
}
// Calculate desired display size based on scaling type, video aspect ratio,
// and maximum layout size.
final float frameAspect = frameWidth / (float) frameHeight;
final float displayAspect = maxWidth / (float) maxHeight;
final ScalingType scalingType = (frameAspect > 1.0f) == (displayAspect > 1.0f)
? scalingTypeMatchOrientation
: scalingTypeMismatchOrientation;
final Point layoutSize = getDisplaySize(scalingType, frameAspect, maxWidth, maxHeight);
// If the measure specification forces a specific size, yield to it.
if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) {
layoutSize.x = maxWidth;
}
if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) {
layoutSize.y = maxHeight;
}
return layoutSize;
}
}
// Types of video scaling:
// SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
// maintaining the aspect ratio (black borders may be displayed).
// SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
// maintaining the aspect ratio. Some portion of the video frame may be
// clipped.
// SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
// possible of the view while maintaining aspect ratio, under the constraint that at least
// |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
// This limits excessive cropping when adjusting display size.
private static final float BALANCED_VISIBLE_FRACTION = 0.5625f;
// clang-format off
public static final float[] identityMatrix() {
return new float[] {
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1};
}
// Matrix with transform y' = 1 - y.
public static final float[] verticalFlipMatrix() {
return new float[] {
1, 0, 0, 0,
0, -1, 0, 0,
0, 0, 1, 0,
0, 1, 0, 1};
}
// Matrix with transform x' = 1 - x.
public static final float[] horizontalFlipMatrix() {
return new float[] {
-1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
1, 0, 0, 1};
}
// clang-format on
/**
* Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
* clockwise when rendered.
*/
public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
final float[] rotationMatrix = new float[16];
Matrix.setRotateM(rotationMatrix, 0, rotationDegree, 0, 0, 1);
adjustOrigin(rotationMatrix);
return multiplyMatrices(textureMatrix, rotationMatrix);
}
/**
* Returns new matrix with the result of a * b.
*/
public static float[] multiplyMatrices(float[] a, float[] b) {
final float[] resultMatrix = new float[16];
Matrix.multiplyMM(resultMatrix, 0, a, 0, b, 0);
return resultMatrix;
}
/**
* Returns layout transformation matrix that applies an optional mirror effect and compensates
* for video vs display aspect ratio.
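* <p>
* For example (a sketch; a 4:3 video letterboxed into a 16:9 view, unmirrored):
* <pre>
* {@code
* float[] layoutMatrix = RendererCommon.getLayoutMatrix(false, 4f / 3f, 16f / 9f);
* }
* </pre>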
*/
public static float[] getLayoutMatrix(
boolean mirror, float videoAspectRatio, float displayAspectRatio) {
float scaleX = 1;
float scaleY = 1;
// Scale X or Y dimension so that video and display size have same aspect ratio.
if (displayAspectRatio > videoAspectRatio) {
scaleY = videoAspectRatio / displayAspectRatio;
} else {
scaleX = displayAspectRatio / videoAspectRatio;
}
// Apply optional horizontal flip.
if (mirror) {
scaleX *= -1;
}
final float matrix[] = new float[16];
Matrix.setIdentityM(matrix, 0);
Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
adjustOrigin(matrix);
return matrix;
}
/** Converts a float[16] matrix array to android.graphics.Matrix. */
public static android.graphics.Matrix convertMatrixToAndroidGraphicsMatrix(float[] matrix4x4) {
// clang-format off
float[] values = {
matrix4x4[0 * 4 + 0], matrix4x4[1 * 4 + 0], matrix4x4[3 * 4 + 0],
matrix4x4[0 * 4 + 1], matrix4x4[1 * 4 + 1], matrix4x4[3 * 4 + 1],
matrix4x4[0 * 4 + 3], matrix4x4[1 * 4 + 3], matrix4x4[3 * 4 + 3],
};
// clang-format on
android.graphics.Matrix matrix = new android.graphics.Matrix();
matrix.setValues(values);
return matrix;
}
/** Converts android.graphics.Matrix to a float[16] matrix array. */
public static float[] convertMatrixFromAndroidGraphicsMatrix(android.graphics.Matrix matrix) {
float[] values = new float[9];
matrix.getValues(values);
// The android.graphics.Matrix looks like this:
// [x1 y1 w1]
// [x2 y2 w2]
// [x3 y3 w3]
// We want to construct a matrix that looks like this:
// [x1 y1 0 w1]
// [x2 y2 0 w2]
// [ 0 0 1 0]
// [x3 y3 0 w3]
// Since it is stored in column-major order, it looks like this:
// [x1 x2 0 x3
// y1 y2 0 y3
// 0 0 1 0
// w1 w2 0 w3]
// clang-format off
float[] matrix4x4 = {
values[0 * 3 + 0], values[1 * 3 + 0], 0, values[2 * 3 + 0],
values[0 * 3 + 1], values[1 * 3 + 1], 0, values[2 * 3 + 1],
0, 0, 1, 0,
values[0 * 3 + 2], values[1 * 3 + 2], 0, values[2 * 3 + 2],
};
// clang-format on
return matrix4x4;
}
/**
* Calculate display size based on scaling type, video aspect ratio, and maximum display size.
*/
public static Point getDisplaySize(
ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
maxDisplayWidth, maxDisplayHeight);
}
/**
* Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
* that are in the range 0 to 1.
*/
private static void adjustOrigin(float[] matrix) {
// Note that OpenGL is using column-major order.
// Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
// Post translate with 0.5 to move coordinates to range [0, 1].
matrix[12] += 0.5f;
matrix[13] += 0.5f;
}
/**
* Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
* that must remain visible.
*/
private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
switch (scalingType) {
case SCALE_ASPECT_FIT:
return 1.0f;
case SCALE_ASPECT_FILL:
return 0.0f;
case SCALE_ASPECT_BALANCED:
return BALANCED_VISIBLE_FRACTION;
default:
throw new IllegalArgumentException();
}
}
/**
* Calculate display size based on minimum fraction of the video that must remain visible,
* video aspect ratio, and maximum display size.
*/
private static Point getDisplaySize(
float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
// If there is no constraint on the amount of cropping, fill the allowed display area.
if (minVisibleFraction == 0 || videoAspectRatio == 0) {
return new Point(maxDisplayWidth, maxDisplayHeight);
}
// Each dimension is constrained on max display size and how much we are allowed to crop.
final int width = Math.min(
maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
final int height = Math.min(
maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
return new Point(width, height);
}
}

View File

@ -0,0 +1,61 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.LinkedList;
/**
* The parameters for an {@code RtpSender}, as defined in
* http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface.
*
* Note: These structures use nullable Integer/etc. types because in the
* future, they may be used to construct ORTC RtpSender/RtpReceivers, in
* which case "null" will be used to represent "choose the implementation
* default value".
*/
public class RtpParameters {
public static class Encoding {
// Set to true to cause this encoding to be sent, and false for it not to
// be sent.
public boolean active = true;
// If non-null, this represents the Transport Independent Application
// Specific maximum bandwidth defined in RFC3890. If null, there is no
// maximum bitrate.
public Integer maxBitrateBps;
// SSRC to be used by this encoding.
// Can't be changed between getParameters/setParameters.
public Long ssrc;
}
public static class Codec {
// Payload type used to identify this codec in RTP packets.
public int payloadType;
// Name used to identify the codec. Equivalent to MIME subtype.
public String name;
// The media type of this codec. Equivalent to MIME top-level type.
MediaStreamTrack.MediaType kind;
// Clock rate in Hertz.
public Integer clockRate;
// The number of audio channels used. Set to null for video codecs.
public Integer numChannels;
}
public final LinkedList<Encoding> encodings;
// Codec parameters can't currently be changed between getParameters and
// setParameters. Though in the future it will be possible to reorder them or
// remove them.
public final LinkedList<Codec> codecs;
public RtpParameters() {
encodings = new LinkedList<Encoding>();
codecs = new LinkedList<Codec>();
}
}

View File

@ -0,0 +1,80 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ RtpReceiverInterface. */
public class RtpReceiver {
/** Java wrapper for a C++ RtpReceiverObserverInterface*/
public static interface Observer {
// Called when the first audio or video packet is received.
public void onFirstPacketReceived(MediaStreamTrack.MediaType media_type);
}
final long nativeRtpReceiver;
private long nativeObserver;
private MediaStreamTrack cachedTrack;
public RtpReceiver(long nativeRtpReceiver) {
this.nativeRtpReceiver = nativeRtpReceiver;
long track = nativeGetTrack(nativeRtpReceiver);
// We can assume that an RtpReceiver always has an associated track.
cachedTrack = new MediaStreamTrack(track);
}
public MediaStreamTrack track() {
return cachedTrack;
}
public boolean setParameters(RtpParameters parameters) {
return nativeSetParameters(nativeRtpReceiver, parameters);
}
public RtpParameters getParameters() {
return nativeGetParameters(nativeRtpReceiver);
}
public String id() {
return nativeId(nativeRtpReceiver);
}
public void dispose() {
cachedTrack.dispose();
if (nativeObserver != 0) {
nativeUnsetObserver(nativeRtpReceiver, nativeObserver);
nativeObserver = 0;
}
JniCommon.nativeReleaseRef(nativeRtpReceiver);
}
public void SetObserver(Observer observer) {
// Unset the existing one before setting a new one.
if (nativeObserver != 0) {
nativeUnsetObserver(nativeRtpReceiver, nativeObserver);
}
nativeObserver = nativeSetObserver(nativeRtpReceiver, observer);
}
// This should increment the reference count of the track.
// Will be released in dispose().
private static native long nativeGetTrack(long nativeRtpReceiver);
private static native boolean nativeSetParameters(
long nativeRtpReceiver, RtpParameters parameters);
private static native RtpParameters nativeGetParameters(long nativeRtpReceiver);
private static native String nativeId(long nativeRtpReceiver);
private static native long nativeSetObserver(long nativeRtpReceiver, Observer observer);
private static native long nativeUnsetObserver(long nativeRtpReceiver, long nativeObserver);
};

View File

@ -0,0 +1,103 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ RtpSenderInterface. */
public class RtpSender {
final long nativeRtpSender;
private MediaStreamTrack cachedTrack;
private boolean ownsTrack = true;
private final DtmfSender dtmfSender;
public RtpSender(long nativeRtpSender) {
this.nativeRtpSender = nativeRtpSender;
long track = nativeGetTrack(nativeRtpSender);
// It may be possible for an RtpSender to be created without a track.
cachedTrack = (track != 0) ? new MediaStreamTrack(track) : null;
long nativeDtmfSender = nativeGetDtmfSender(nativeRtpSender);
dtmfSender = (nativeDtmfSender != 0) ? new DtmfSender(nativeDtmfSender) : null;
}
/**
* Starts sending a new track, without requiring additional SDP negotiation.
* <p>
* Note: This is equivalent to replaceTrack in the official WebRTC API. It
* was just implemented before the standards group settled on a name.
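* <p>
* Example (a sketch; the application creates and keeps ownership of the track):
* <pre>
* {@code
* AudioTrack track = factory.createAudioTrack("audio0", audioSource);
* sender.setTrack(track, false); // false: the caller still owns and disposes |track|.
* }
* </pre>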
*
* @param takeOwnership If true, the RtpSender takes ownership of the track
* from the caller, and will auto-dispose of it when no
* longer needed. |takeOwnership| should only be used if
* the caller owns the track; it is not appropriate when
* the track is owned by, for example, another RtpSender
* or a MediaStream.
* @return true on success and false on failure.
*/
public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
if (!nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.nativeTrack)) {
return false;
}
if (cachedTrack != null && ownsTrack) {
cachedTrack.dispose();
}
cachedTrack = track;
ownsTrack = takeOwnership;
return true;
}
public MediaStreamTrack track() {
return cachedTrack;
}
public boolean setParameters(RtpParameters parameters) {
return nativeSetParameters(nativeRtpSender, parameters);
}
public RtpParameters getParameters() {
return nativeGetParameters(nativeRtpSender);
}
public String id() {
return nativeId(nativeRtpSender);
}
public DtmfSender dtmf() {
return dtmfSender;
}
public void dispose() {
if (dtmfSender != null) {
dtmfSender.dispose();
}
if (cachedTrack != null && ownsTrack) {
cachedTrack.dispose();
}
JniCommon.nativeReleaseRef(nativeRtpSender);
}
private static native boolean nativeSetTrack(long nativeRtpSender, long nativeTrack);
// This should increment the reference count of the track.
// Will be released in dispose() or setTrack().
private static native long nativeGetTrack(long nativeRtpSender);
// This should increment the reference count of the DTMF sender.
// Will be released in dispose().
private static native long nativeGetDtmfSender(long nativeRtpSender);
private static native boolean nativeSetParameters(long nativeRtpSender, RtpParameters parameters);
private static native RtpParameters nativeGetParameters(long nativeRtpSender);
private static native String nativeId(long nativeRtpSender);
};

View File

@ -0,0 +1,203 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.view.Surface;
import java.util.ArrayList;
import java.util.List;
/**
* An implementation of VideoCapturer to capture the screen content as a video stream.
* Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. We interact with this
* {@code SurfaceTexture} using a {@code SurfaceTextureHelper}.
* The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in
* {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it
* as a texture to the native code via {@code CapturerObserver.onTextureFrameCaptured()}. This takes
* place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
* the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
* frames. At any time, at most one frame is being processed.
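*
* <p>Typical construction (a sketch; {@code data} is assumed to be the Intent delivered to
* onActivityResult() after launching MediaProjectionManager.createScreenCaptureIntent()):
* <pre>
* {@code
* VideoCapturer capturer =
*     new ScreenCapturerAndroid(data, new MediaProjection.Callback() {});
* }
* </pre>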
*/
@TargetApi(21)
public class ScreenCapturerAndroid
implements VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
private static final int DISPLAY_FLAGS =
DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
// DPI for the VirtualDisplay; the value does not seem to matter for us.
private static final int VIRTUAL_DISPLAY_DPI = 400;
private final Intent mediaProjectionPermissionResultData;
private final MediaProjection.Callback mediaProjectionCallback;
private int width;
private int height;
private VirtualDisplay virtualDisplay;
private SurfaceTextureHelper surfaceTextureHelper;
private CapturerObserver capturerObserver;
private long numCapturedFrames = 0;
private MediaProjection mediaProjection;
private boolean isDisposed = false;
private MediaProjectionManager mediaProjectionManager;
/**
* Constructs a new Screen Capturer.
*
* @param mediaProjectionPermissionResultData the result data of MediaProjection permission
* activity; the calling app must validate that result code is Activity.RESULT_OK before
* calling this method.
* @param mediaProjectionCallback MediaProjection callback to implement application specific
* logic in events such as when the user revokes a previously granted capture permission.
**/
public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData,
MediaProjection.Callback mediaProjectionCallback) {
this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
this.mediaProjectionCallback = mediaProjectionCallback;
}
private void checkNotDisposed() {
if (isDisposed) {
throw new RuntimeException("capturer is disposed.");
}
}
@Override
public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
final Context applicationContext, final VideoCapturer.CapturerObserver capturerObserver) {
checkNotDisposed();
if (capturerObserver == null) {
throw new RuntimeException("capturerObserver not set.");
}
this.capturerObserver = capturerObserver;
if (surfaceTextureHelper == null) {
throw new RuntimeException("surfaceTextureHelper not set.");
}
this.surfaceTextureHelper = surfaceTextureHelper;
mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
Context.MEDIA_PROJECTION_SERVICE);
}
@Override
public synchronized void startCapture(
final int width, final int height, final int ignoredFramerate) {
checkNotDisposed();
this.width = width;
this.height = height;
mediaProjection = mediaProjectionManager.getMediaProjection(
Activity.RESULT_OK, mediaProjectionPermissionResultData);
// Let MediaProjection callback use the SurfaceTextureHelper thread.
mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler());
createVirtualDisplay();
capturerObserver.onCapturerStarted(true);
surfaceTextureHelper.startListening(ScreenCapturerAndroid.this);
}
@Override
public synchronized void stopCapture() {
checkNotDisposed();
ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
@Override
public void run() {
surfaceTextureHelper.stopListening();
capturerObserver.onCapturerStopped();
if (virtualDisplay != null) {
virtualDisplay.release();
virtualDisplay = null;
}
if (mediaProjection != null) {
// Unregister the callback before stopping, otherwise the callback recursively
// calls this method.
mediaProjection.unregisterCallback(mediaProjectionCallback);
mediaProjection.stop();
mediaProjection = null;
}
}
});
}
@Override
public synchronized void dispose() {
isDisposed = true;
}
/**
* Changes output video format. This method can be used to scale the output
* video, or to change orientation when the captured screen is rotated for example.
*
* @param width new output video width
* @param height new output video height
* @param ignoredFramerate ignored
*/
@Override
public synchronized void changeCaptureFormat(
final int width, final int height, final int ignoredFramerate) {
checkNotDisposed();
this.width = width;
this.height = height;
if (virtualDisplay == null) {
// The capturer is stopped; the virtual display will be created in startCapture().
return;
}
// Create a new virtual display on the surfaceTextureHelper thread to avoid interference
// with frame processing, which happens on the same thread (we serialize events by running
// them on the same thread).
ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
@Override
public void run() {
virtualDisplay.release();
createVirtualDisplay();
}
});
}
private void createVirtualDisplay() {
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
null /* callback */, null /* callback handler */);
}
// This is called on the internal looper thread of {@code SurfaceTextureHelper}.
@Override
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
numCapturedFrames++;
capturerObserver.onTextureFrameCaptured(
width, height, oesTextureId, transformMatrix, 0 /* rotation */, timestampNs);
}
@Override
public boolean isScreencast() {
return true;
}
public long getNumCapturedFrames() {
return numCapturedFrames;
}
}

View File

@ -0,0 +1,26 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Interface for observing SDP-related events. */
public interface SdpObserver {
/** Called on success of Create{Offer,Answer}(). */
public void onCreateSuccess(SessionDescription sdp);
/** Called on success of Set{Local,Remote}Description(). */
public void onSetSuccess();
/** Called on error of Create{Offer,Answer}(). */
public void onCreateFailure(String error);
/** Called on error of Set{Local,Remote}Description(). */
public void onSetFailure(String error);
}

View File

@ -0,0 +1,43 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.Locale;
/**
* Description of an RFC 4566 Session.
* SDPs are passed as serialized Strings in Java-land and are materialized
* to SessionDescriptionInterface as appropriate in the JNI layer.
*/
public class SessionDescription {
/** Java-land enum version of SessionDescriptionInterface's type() string. */
public static enum Type {
OFFER,
PRANSWER,
ANSWER;
public String canonicalForm() {
return name().toLowerCase(Locale.US);
}
public static Type fromCanonicalForm(String canonical) {
return Type.valueOf(Type.class, canonical.toUpperCase(Locale.US));
}
}
public final Type type;
public final String description;
public SessionDescription(Type type, String description) {
this.type = type;
this.description = description;
}
}

View File

@ -0,0 +1,17 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Interface for observing Stats reports (see webrtc::StatsObservers). */
public interface StatsObserver {
/** Called when the reports are ready.*/
public void onComplete(StatsReport[] reports);
}

View File

@ -0,0 +1,59 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java version of webrtc::StatsReport. */
public class StatsReport {
/** Java version of webrtc::StatsReport::Value. */
public static class Value {
public final String name;
public final String value;
public Value(String name, String value) {
this.name = name;
this.value = value;
}
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("[").append(name).append(": ").append(value).append("]");
return builder.toString();
}
}
public final String id;
public final String type;
// Time since 1970-01-01T00:00:00Z in milliseconds.
public final double timestamp;
public final Value[] values;
public StatsReport(String id, String type, double timestamp, Value[] values) {
this.id = id;
this.type = type;
this.timestamp = timestamp;
this.values = values;
}
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("id: ")
.append(id)
.append(", type: ")
.append(type)
.append(", timestamp: ")
.append(timestamp)
.append(", values: ");
for (int i = 0; i < values.length; ++i) {
builder.append(values[i].toString()).append(", ");
}
return builder.toString();
}
}

View File

@ -0,0 +1,311 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import org.webrtc.VideoFrame.I420Buffer;
import org.webrtc.VideoFrame.TextureBuffer;
/**
* Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
* of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
* the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
* called in order to receive a new frame. Call stopListening() to stop receiving new frames. Call
* dispose to release all resources once the texture frame is returned.
* Note that there is a C++ counter part of this class that optionally can be used. It is used for
* wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
* when the webrtc::VideoFrame is no longer used.
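*
* <p>Typical usage (a sketch; assumes Java 8+ lambdas, as used elsewhere in this file):
* <pre>
* {@code
* SurfaceTextureHelper helper = SurfaceTextureHelper.create("STHelper", eglContext);
* helper.startListening((oesTextureId, transformMatrix, timestampNs) -> {
*   // ... render or convert the texture frame ...
*   helper.returnTextureFrame(); // Required before the next frame can be delivered.
* });
* }
* </pre>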
*/
public class SurfaceTextureHelper {
private static final String TAG = "SurfaceTextureHelper";
/**
* Callback interface for being notified that a new texture frame is available. The calls will be
* made on the SurfaceTextureHelper handler thread, with a bound EGLContext. The callee is not
* allowed to make another EGLContext current on the calling thread.
*/
public interface OnTextureFrameAvailableListener {
abstract void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs);
}
/**
* Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
* thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to
* initialize a pixel buffer surface and make it current.
*/
public static SurfaceTextureHelper create(
final String threadName, final EglBase.Context sharedContext) {
final HandlerThread thread = new HandlerThread(threadName);
thread.start();
final Handler handler = new Handler(thread.getLooper());
// The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
// Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
// is constructed on the |handler| thread.
return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
@Override
public SurfaceTextureHelper call() {
try {
return new SurfaceTextureHelper(sharedContext, handler);
} catch (RuntimeException e) {
Logging.e(TAG, threadName + " create failure", e);
return null;
}
}
});
}
private final Handler handler;
private final EglBase eglBase;
private final SurfaceTexture surfaceTexture;
private final int oesTextureId;
private YuvConverter yuvConverter;
// These variables are only accessed from the |handler| thread.
private OnTextureFrameAvailableListener listener;
// The possible states of this class.
private boolean hasPendingTexture = false;
private volatile boolean isTextureInUse = false;
private boolean isQuitting = false;
// |pendingListener| is set in startListening() and the runnable is posted to the handler thread.
// startListening() is not allowed to be called again before stopListening(), so this is thread safe.
private OnTextureFrameAvailableListener pendingListener;
final Runnable setListenerRunnable = new Runnable() {
@Override
public void run() {
Logging.d(TAG, "Setting listener to " + pendingListener);
listener = pendingListener;
pendingListener = null;
// May have a pending frame from the previous capture session - drop it.
if (hasPendingTexture) {
// Calling updateTexImage() is necessary in order to receive new frames.
updateTexImage();
hasPendingTexture = false;
}
}
};
private SurfaceTextureHelper(EglBase.Context sharedContext, Handler handler) {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
}
this.handler = handler;
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
try {
// Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
} catch (RuntimeException e) {
// Clean up before rethrowing the exception.
eglBase.release();
handler.getLooper().quit();
throw e;
}
oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
surfaceTexture = new SurfaceTexture(oesTextureId);
setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> {
hasPendingTexture = true;
tryDeliverTextureFrame();
}, handler);
}
@TargetApi(21)
private static void setOnFrameAvailableListener(SurfaceTexture surfaceTexture,
SurfaceTexture.OnFrameAvailableListener listener, Handler handler) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
surfaceTexture.setOnFrameAvailableListener(listener, handler);
} else {
// The documentation states that the listener will be called on an arbitrary thread, but in
// practice, it is always the thread on which the SurfaceTexture was constructed. There are
// assertions in place in case this ever changes. For API >= 21, we use the new API to
// explicitly specify the handler.
surfaceTexture.setOnFrameAvailableListener(listener);
}
}
/**
* Start to stream textures to the given |listener|. If you need to change listener, you need to
* call stopListening() first.
*/
public void startListening(final OnTextureFrameAvailableListener listener) {
if (this.listener != null || this.pendingListener != null) {
throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
}
this.pendingListener = listener;
handler.post(setListenerRunnable);
}
/**
* Stop listening. The listener set in startListening() is guaranteed to not receive any more
* onTextureFrameAvailable() callbacks after this function returns.
*/
public void stopListening() {
Logging.d(TAG, "stopListening()");
handler.removeCallbacks(setListenerRunnable);
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
@Override
public void run() {
listener = null;
pendingListener = null;
}
});
}
/**
* Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
* producer such as a camera or decoder.
*/
public SurfaceTexture getSurfaceTexture() {
return surfaceTexture;
}
/**
* Retrieve the handler that calls onTextureFrameAvailable(). This handler is valid until
* dispose() is called.
*/
public Handler getHandler() {
return handler;
}
/**
* Call this function to signal that you are done with the frame received in
* onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
* this function in order to receive a new frame.
*/
public void returnTextureFrame() {
handler.post(new Runnable() {
@Override
public void run() {
isTextureInUse = false;
if (isQuitting) {
release();
} else {
tryDeliverTextureFrame();
}
}
});
}
public boolean isTextureInUse() {
return isTextureInUse;
}
/**
* Call dispose() to stop receiving frames. OpenGL resources are released and the handler is
* stopped when the texture frame has been returned by a call to returnTextureFrame(). You are
* guaranteed to not receive any more onTextureFrameAvailable() after this function returns.
*/
public void dispose() {
Logging.d(TAG, "dispose()");
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
@Override
public void run() {
isQuitting = true;
if (!isTextureInUse) {
release();
}
}
});
}
public void textureToYUV(final ByteBuffer buf, final int width, final int height,
final int stride, final int textureId, final float[] transformMatrix) {
if (textureId != oesTextureId) {
throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
}
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
@Override
public void run() {
if (yuvConverter == null) {
yuvConverter = new YuvConverter();
}
yuvConverter.convert(buf, width, height, stride, textureId, transformMatrix);
}
});
}
private void updateTexImage() {
// SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
// as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
// See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
synchronized (EglBase.lock) {
surfaceTexture.updateTexImage();
}
}
private void tryDeliverTextureFrame() {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("Wrong thread.");
}
if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
return;
}
isTextureInUse = true;
hasPendingTexture = false;
updateTexImage();
final float[] transformMatrix = new float[16];
surfaceTexture.getTransformMatrix(transformMatrix);
final long timestampNs = surfaceTexture.getTimestamp();
listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
}
private void release() {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("Wrong thread.");
}
if (isTextureInUse || !isQuitting) {
throw new IllegalStateException("Unexpected release.");
}
if (yuvConverter != null) {
yuvConverter.release();
}
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
surfaceTexture.release();
eglBase.release();
handler.getLooper().quit();
}
/**
* Creates a VideoFrame buffer backed by this helper's texture. The |width| and |height| should
* match the dimensions of the data placed in the texture. The correct |transformMatrix| may be
* obtained from callbacks to OnTextureFrameAvailableListener.
*
* The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The
* buffer calls returnTextureFrame() when it is released.
*/
public TextureBuffer createTextureBuffer(int width, int height, Matrix transformMatrix) {
return new TextureBufferImpl(
width, height, TextureBuffer.Type.OES, oesTextureId, transformMatrix, this, new Runnable() {
@Override
public void run() {
returnTextureFrame();
}
});
}
}
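
A minimal usage sketch for SurfaceTextureHelper, assuming the static create() factory defined earlier in this file; eglContext and processFrame() are illustrative placeholders, not part of the API:

final SurfaceTextureHelper helper = SurfaceTextureHelper.create("CaptureThread", eglContext);
helper.startListening((int oesTextureId, float[] transformMatrix, long timestampNs) -> {
  // Runs on the helper's handler thread. Only one frame is in flight at a
  // time, so the frame must be returned before the next one can be delivered.
  processFrame(oesTextureId, transformMatrix, timestampNs); // illustrative consumer
  helper.returnTextureFrame();
});
// Hand helper.getSurfaceTexture() to a producer such as a camera or decoder.
// On teardown:
helper.stopListening();
helper.dispose();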

View File

@ -0,0 +1,385 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.content.res.Resources.NotFoundException;
import android.graphics.Point;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.util.concurrent.CountDownLatch;
/**
* Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
* renderFrame() is asynchronous to avoid blocking the calling thread.
* This class is thread safe and handles access from potentially four different threads:
* Interaction from the main app in init, release, setMirror, and setScalingType.
* Interaction from C++ rtc::VideoSinkInterface in renderFrame.
* Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
* Interaction with the layout framework in onMeasure and onSizeChanged.
*/
public class SurfaceViewRenderer
extends SurfaceView implements SurfaceHolder.Callback, VideoRenderer.Callbacks, VideoSink {
private static final String TAG = "SurfaceViewRenderer";
// Cached resource name.
private final String resourceName;
private final RendererCommon.VideoLayoutMeasure videoLayoutMeasure =
new RendererCommon.VideoLayoutMeasure();
private final EglRenderer eglRenderer;
// Callback for reporting renderer events. Read-only after initialization so no lock required.
private RendererCommon.RendererEvents rendererEvents;
private final Object layoutLock = new Object();
private boolean isRenderingPaused = false;
private boolean isFirstFrameRendered;
private int rotatedFrameWidth;
private int rotatedFrameHeight;
private int frameRotation;
// Accessed only on the main thread.
private boolean enableFixedSize;
private int surfaceWidth;
private int surfaceHeight;
/**
* Standard View constructor. In order to render something, you must first call init().
*/
public SurfaceViewRenderer(Context context) {
super(context);
this.resourceName = getResourceName();
eglRenderer = new EglRenderer(resourceName);
getHolder().addCallback(this);
}
/**
* Standard View constructor. In order to render something, you must first call init().
*/
public SurfaceViewRenderer(Context context, AttributeSet attrs) {
super(context, attrs);
this.resourceName = getResourceName();
eglRenderer = new EglRenderer(resourceName);
getHolder().addCallback(this);
}
/**
* Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
* reinitialize the renderer after a previous init()/release() cycle.
*/
public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
}
/**
* Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
* for drawing frames on the EGLSurface. This class is responsible for calling release() on
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
* init()/release() cycle.
*/
public void init(final EglBase.Context sharedContext,
RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
RendererCommon.GlDrawer drawer) {
ThreadUtils.checkIsOnMainThread();
this.rendererEvents = rendererEvents;
synchronized (layoutLock) {
isFirstFrameRendered = false;
rotatedFrameWidth = 0;
rotatedFrameHeight = 0;
frameRotation = 0;
}
eglRenderer.init(sharedContext, configAttributes, drawer);
}
/**
* Block until any pending frame is returned and all GL resources released, even if an interrupt
* occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
* should be called before the Activity is destroyed and the EGLContext is still valid. If you
* don't call this function, the GL resources might leak.
*/
public void release() {
eglRenderer.release();
}
/**
* Register a callback to be invoked when a new video frame has been received.
*
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
* It should be lightweight and must not call removeFrameListener.
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
* required.
* @param drawerParam Custom drawer to use for this frame listener.
*/
public void addFrameListener(
EglRenderer.FrameListener listener, float scale, RendererCommon.GlDrawer drawerParam) {
eglRenderer.addFrameListener(listener, scale, drawerParam);
}
/**
* Register a callback to be invoked when a new video frame has been received. This version uses
* the drawer of the EglRenderer that was passed in init.
*
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
* It should be lightweight and must not call removeFrameListener.
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
* required.
*/
public void addFrameListener(EglRenderer.FrameListener listener, float scale) {
eglRenderer.addFrameListener(listener, scale);
}
public void removeFrameListener(EglRenderer.FrameListener listener) {
eglRenderer.removeFrameListener(listener);
}
/**
* Enables fixed size for the surface. This provides better performance but might be buggy on some
* devices. By default this is turned off.
*/
public void setEnableHardwareScaler(boolean enabled) {
ThreadUtils.checkIsOnMainThread();
enableFixedSize = enabled;
updateSurfaceSize();
}
/**
* Set if the video stream should be mirrored or not.
*/
public void setMirror(final boolean mirror) {
eglRenderer.setMirror(mirror);
}
/**
* Set how the video will fill the allowed layout area.
*/
public void setScalingType(RendererCommon.ScalingType scalingType) {
ThreadUtils.checkIsOnMainThread();
videoLayoutMeasure.setScalingType(scalingType);
requestLayout();
}
public void setScalingType(RendererCommon.ScalingType scalingTypeMatchOrientation,
RendererCommon.ScalingType scalingTypeMismatchOrientation) {
ThreadUtils.checkIsOnMainThread();
videoLayoutMeasure.setScalingType(scalingTypeMatchOrientation, scalingTypeMismatchOrientation);
requestLayout();
}
/**
* Limit render framerate.
*
* @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
* reduction.
*/
public void setFpsReduction(float fps) {
synchronized (layoutLock) {
isRenderingPaused = fps == 0f;
}
eglRenderer.setFpsReduction(fps);
}
public void disableFpsReduction() {
synchronized (layoutLock) {
isRenderingPaused = false;
}
eglRenderer.disableFpsReduction();
}
public void pauseVideo() {
synchronized (layoutLock) {
isRenderingPaused = true;
}
eglRenderer.pauseVideo();
}
// VideoRenderer.Callbacks interface.
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
updateFrameDimensionsAndReportEvents(frame);
eglRenderer.renderFrame(frame);
}
// VideoSink interface.
@Override
public void onFrame(VideoFrame frame) {
updateFrameDimensionsAndReportEvents(frame);
eglRenderer.onFrame(frame);
}
// View layout interface.
@Override
protected void onMeasure(int widthSpec, int heightSpec) {
ThreadUtils.checkIsOnMainThread();
final Point size;
synchronized (layoutLock) {
size =
videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
}
setMeasuredDimension(size.x, size.y);
logD("onMeasure(). New size: " + size.x + "x" + size.y);
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
ThreadUtils.checkIsOnMainThread();
eglRenderer.setLayoutAspectRatio((right - left) / (float) (bottom - top));
updateSurfaceSize();
}
private void updateSurfaceSize() {
ThreadUtils.checkIsOnMainThread();
synchronized (layoutLock) {
if (enableFixedSize && rotatedFrameWidth != 0 && rotatedFrameHeight != 0 && getWidth() != 0
&& getHeight() != 0) {
final float layoutAspectRatio = getWidth() / (float) getHeight();
final float frameAspectRatio = rotatedFrameWidth / (float) rotatedFrameHeight;
final int drawnFrameWidth;
final int drawnFrameHeight;
if (frameAspectRatio > layoutAspectRatio) {
drawnFrameWidth = (int) (rotatedFrameHeight * layoutAspectRatio);
drawnFrameHeight = rotatedFrameHeight;
} else {
drawnFrameWidth = rotatedFrameWidth;
drawnFrameHeight = (int) (rotatedFrameWidth / layoutAspectRatio);
}
// Aspect ratio of the drawn frame and the view is the same.
final int width = Math.min(getWidth(), drawnFrameWidth);
final int height = Math.min(getHeight(), drawnFrameHeight);
logD("updateSurfaceSize. Layout size: " + getWidth() + "x" + getHeight() + ", frame size: "
+ rotatedFrameWidth + "x" + rotatedFrameHeight + ", requested surface size: " + width
+ "x" + height + ", old surface size: " + surfaceWidth + "x" + surfaceHeight);
if (width != surfaceWidth || height != surfaceHeight) {
surfaceWidth = width;
surfaceHeight = height;
getHolder().setFixedSize(width, height);
}
} else {
surfaceWidth = surfaceHeight = 0;
getHolder().setSizeFromLayout();
}
}
}
// SurfaceHolder.Callback interface.
@Override
public void surfaceCreated(final SurfaceHolder holder) {
ThreadUtils.checkIsOnMainThread();
eglRenderer.createEglSurface(holder.getSurface());
surfaceWidth = surfaceHeight = 0;
updateSurfaceSize();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
ThreadUtils.checkIsOnMainThread();
final CountDownLatch completionLatch = new CountDownLatch(1);
eglRenderer.releaseEglSurface(new Runnable() {
@Override
public void run() {
completionLatch.countDown();
}
});
ThreadUtils.awaitUninterruptibly(completionLatch);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
ThreadUtils.checkIsOnMainThread();
logD("surfaceChanged: format: " + format + " size: " + width + "x" + height);
}
private String getResourceName() {
try {
return getResources().getResourceEntryName(getId()) + ": ";
} catch (NotFoundException e) {
return "";
}
}
/**
* Post a task to clear the SurfaceView to a transparent uniform color.
*/
public void clearImage() {
eglRenderer.clearImage();
}
// Update frame dimensions and report any changes to |rendererEvents|.
private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
synchronized (layoutLock) {
if (isRenderingPaused) {
return;
}
if (!isFirstFrameRendered) {
isFirstFrameRendered = true;
logD("Reporting first rendered frame.");
if (rendererEvents != null) {
rendererEvents.onFirstFrameRendered();
}
}
if (rotatedFrameWidth != frame.rotatedWidth() || rotatedFrameHeight != frame.rotatedHeight()
|| frameRotation != frame.rotationDegree) {
logD("Reporting frame resolution changed to " + frame.width + "x" + frame.height
+ " with rotation " + frame.rotationDegree);
if (rendererEvents != null) {
rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
}
rotatedFrameWidth = frame.rotatedWidth();
rotatedFrameHeight = frame.rotatedHeight();
frameRotation = frame.rotationDegree;
post(new Runnable() {
@Override
public void run() {
updateSurfaceSize();
requestLayout();
}
});
}
}
}
// Update frame dimensions and report any changes to |rendererEvents|.
private void updateFrameDimensionsAndReportEvents(VideoFrame frame) {
synchronized (layoutLock) {
if (isRenderingPaused) {
return;
}
if (!isFirstFrameRendered) {
isFirstFrameRendered = true;
logD("Reporting first rendered frame.");
if (rendererEvents != null) {
rendererEvents.onFirstFrameRendered();
}
}
if (rotatedFrameWidth != frame.getRotatedWidth()
|| rotatedFrameHeight != frame.getRotatedHeight()
|| frameRotation != frame.getRotation()) {
logD("Reporting frame resolution changed to " + frame.getBuffer().getWidth() + "x"
+ frame.getBuffer().getHeight() + " with rotation " + frame.getRotation());
if (rendererEvents != null) {
rendererEvents.onFrameResolutionChanged(
frame.getBuffer().getWidth(), frame.getBuffer().getHeight(), frame.getRotation());
}
rotatedFrameWidth = frame.getRotatedWidth();
rotatedFrameHeight = frame.getRotatedHeight();
frameRotation = frame.getRotation();
post(() -> {
updateSurfaceSize();
requestLayout();
});
}
}
}
private void logD(String string) {
Logging.d(TAG, resourceName + string);
}
}
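
A typical Activity-side lifecycle for SurfaceViewRenderer might look as follows; eglBase, remoteVideoTrack, and R.id.remote_view are illustrative names for objects defined elsewhere in the application:

SurfaceViewRenderer remoteView = (SurfaceViewRenderer) findViewById(R.id.remote_view);
remoteView.init(eglBase.getEglBaseContext(), null /* rendererEvents */);
remoteView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
remoteView.setEnableHardwareScaler(true);
remoteVideoTrack.addSink(remoteView); // SurfaceViewRenderer implements VideoSink.
// On teardown, while the EGLContext is still valid:
remoteVideoTrack.removeSink(remoteView);
remoteView.release();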

View File

@ -0,0 +1,71 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import java.util.List;
// Base interface for all VideoCapturers to implement.
public interface VideoCapturer {
// Interface used for providing callbacks to an observer.
public interface CapturerObserver {
// Notify whether the camera has been started successfully.
// Called on a Java thread owned by VideoCapturer.
void onCapturerStarted(boolean success);
void onCapturerStopped();
// Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp);
// Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
// owned by VideoCapturer.
void onTextureFrameCaptured(int width, int height, int oesTextureId, float[] transformMatrix,
int rotation, long timestamp);
// Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
void onFrameCaptured(VideoFrame frame);
}
/**
* This function is used to initialize the camera thread, the Android application context, and the
* capture observer. It will be called only once and before any startCapture() request. The
* camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants
* to deliver texture frames, it should do this by rendering on the SurfaceTexture in
* |surfaceTextureHelper|, register itself as a listener, and forward the texture frames to
* CapturerObserver.onTextureFrameCaptured().
*/
void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
CapturerObserver capturerObserver);
/**
* Start capturing frames in a format that is as close as possible to |width| x |height| and
* |framerate|.
*/
void startCapture(int width, int height, int framerate);
/**
* Stop capturing. This function should block until capture is actually stopped.
*/
void stopCapture() throws InterruptedException;
void changeCaptureFormat(int width, int height, int framerate);
/**
* Perform any final cleanup here. No more capturing will be done after this call.
*/
void dispose();
/**
* @return true if-and-only-if this is a screen capturer.
*/
boolean isScreencast();
}
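
A lifecycle sketch of how a VideoCapturer is driven; capturer, observer, eglContext, and appContext stand in for objects obtained elsewhere (for example a camera-based capturer and the observer supplied by the native video source):

SurfaceTextureHelper helper = SurfaceTextureHelper.create("CapturerThread", eglContext);
capturer.initialize(helper, appContext, observer);
capturer.startCapture(1280, 720, 30 /* framerate */);
// ...
try {
  capturer.stopCapture(); // blocks until capture has actually stopped
} catch (InterruptedException e) {
  Thread.currentThread().interrupt();
}
capturer.dispose();
helper.dispose();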

View File

@ -0,0 +1,41 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.Map;
/**
* Represent a video codec as encoded in SDP.
*/
public class VideoCodecInfo {
// Keys for H264 VideoCodecInfo properties.
public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id";
public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed";
public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode";
public static final String H264_PROFILE_CONSTRAINED_BASELINE = "4200";
public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c";
public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
public static final String H264_CONSTRAINED_HIGH_3_1 =
H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
public static final String H264_CONSTRAINED_BASELINE_3_1 =
H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;
public final int payload;
public final String name;
public final Map<String, String> params;
public VideoCodecInfo(int payload, String name, Map<String, String> params) {
this.payload = payload;
this.name = name;
this.params = params;
}
}
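
A construction sketch for an H264 constrained-baseline entry using the fmtp keys above; the payload type 127 is an arbitrary illustrative value, since payload types are negotiated in SDP (uses java.util.Map and java.util.HashMap):

Map<String, String> params = new HashMap<>();
params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
    VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
VideoCodecInfo h264 = new VideoCodecInfo(127, "H264", params);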

View File

@ -0,0 +1,41 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Status codes reported by video encoding/decoding components. This should be kept in sync with
* video_error_codes.h.
*/
public enum VideoCodecStatus {
REQUEST_SLI(2),
NO_OUTPUT(1),
OK(0),
ERROR(-1),
LEVEL_EXCEEDED(-2),
MEMORY(-3),
ERR_PARAMETER(-4),
ERR_SIZE(-5),
TIMEOUT(-6),
UNINITIALIZED(-7),
ERR_REQUEST_SLI(-12),
FALLBACK_SOFTWARE(-13),
TARGET_BITRATE_OVERSHOOT(-14);
private final int number;
private VideoCodecStatus(int number) {
this.number = number;
}
public int getNumber() {
return number;
}
}

View File

@ -0,0 +1,73 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Interface for a video decoder that can be used in WebRTC. All calls to the class will be made on
* a single decoding thread.
*/
public interface VideoDecoder {
/** Settings passed to the decoder by WebRTC. */
public class Settings {
public final int numberOfCores;
public final int width;
public final int height;
public Settings(int numberOfCores, int width, int height) {
this.numberOfCores = numberOfCores;
this.width = width;
this.height = height;
}
}
/** Additional info for decoding. */
public class DecodeInfo {
public final boolean isMissingFrames;
public final long renderTimeMs;
public DecodeInfo(boolean isMissingFrames, long renderTimeMs) {
this.isMissingFrames = isMissingFrames;
this.renderTimeMs = renderTimeMs;
}
}
public interface Callback {
/**
* Call to return a decoded frame. Can be called on any thread.
*
* @param frame Decoded frame
* @param decodeTimeMs Time it took to decode the frame in milliseconds or null if not available
* @param qp QP value of the decoded frame or null if not available
*/
void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp);
}
/**
* Initializes the decoding process with specified settings. Will be called on the decoding thread
* before any decode calls.
*/
VideoCodecStatus initDecode(Settings settings, Callback decodeCallback);
/**
* Called when the decoder is no longer needed. Any more calls to decode will not be made.
*/
VideoCodecStatus release();
/**
* Request the decoder to decode a frame.
*/
VideoCodecStatus decode(EncodedImage frame, DecodeInfo info);
/**
* The decoder should return true if it prefers late decoding. That is, it cannot decode an
* infinite number of frames before a decoded frame is consumed.
*/
boolean getPrefersLateDecoding();
/** Should return a descriptive name for the implementation. */
String getImplementationName();
}
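
A skeleton sketch of a VideoDecoder implementation; PassthroughDecoder is a hypothetical name and the decode() body is elided, since producing a VideoFrame requires an actual codec:

public class PassthroughDecoder implements VideoDecoder {
  private Callback callback;

  @Override
  public VideoCodecStatus initDecode(Settings settings, Callback decodeCallback) {
    this.callback = decodeCallback; // invoked once per decoded frame
    return VideoCodecStatus.OK;
  }

  @Override
  public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
    // A real decoder would decode |frame| here and then call
    // callback.onDecodedFrame(decodedFrame, decodeTimeMs, qp) on any thread.
    return VideoCodecStatus.OK;
  }

  @Override
  public VideoCodecStatus release() {
    return VideoCodecStatus.OK;
  }

  @Override
  public boolean getPrefersLateDecoding() {
    return true;
  }

  @Override
  public String getImplementationName() {
    return "PassthroughDecoder";
  }
}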

View File

@ -0,0 +1,20 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Factory for creating VideoDecoders. */
public interface VideoDecoderFactory {
/**
* Creates a VideoDecoder for the given codec. Supports the same codecs supported by
* VideoEncoderFactory.
*/
public VideoDecoder createDecoder(String codecType);
}
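
A factory sketch wiring up the hypothetical PassthroughDecoder above; returning null for an unsupported codec is an assumption here, not a documented part of the interface:

public class SketchDecoderFactory implements VideoDecoderFactory {
  @Override
  public VideoDecoder createDecoder(String codecType) {
    if ("VP8".equals(codecType)) {
      return new PassthroughDecoder(); // the skeleton sketched above
    }
    return null; // assumed to signal an unsupported codec
  }
}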

View File

@ -0,0 +1,148 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Interface for a video encoder that can be used with WebRTC. All calls will be made on the
* encoding thread.
*/
public interface VideoEncoder {
/** Settings passed to the encoder by WebRTC. */
public class Settings {
public final int numberOfCores;
public final int width;
public final int height;
public final int startBitrate; // Kilobits per second.
public final int maxFramerate;
public final boolean automaticResizeOn;
public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate,
boolean automaticResizeOn) {
this.numberOfCores = numberOfCores;
this.width = width;
this.height = height;
this.startBitrate = startBitrate;
this.maxFramerate = maxFramerate;
this.automaticResizeOn = automaticResizeOn;
}
}
/** Additional info for encoding. */
public class EncodeInfo {
public final EncodedImage.FrameType[] frameTypes;
public EncodeInfo(EncodedImage.FrameType[] frameTypes) {
this.frameTypes = frameTypes;
}
}
// TODO(sakal): Add values to these classes as necessary.
/** Codec specific information about the encoded frame. */
public class CodecSpecificInfo {}
public class CodecSpecificInfoVP8 extends CodecSpecificInfo {}
public class CodecSpecificInfoVP9 extends CodecSpecificInfo {}
public class CodecSpecificInfoH264 extends CodecSpecificInfo {}
/**
* Represents bitrate allocated for an encoder to produce frames. Bitrate can be divided between
* spatial and temporal layers.
*/
public class BitrateAllocation {
// First index is the spatial layer and second the temporal layer.
public final int[][] bitratesBbs;
/**
* Initializes the allocation with a two-dimensional array of bitrates. The first index of the
* array is the spatial layer and the second index is the temporal layer.
*/
public BitrateAllocation(int[][] bitratesBbs) {
this.bitratesBbs = bitratesBbs;
}
/**
* Gets the total bitrate allocated for all layers.
*/
public int getSum() {
int sum = 0;
for (int[] spatialLayer : bitratesBbs) {
for (int bitrate : spatialLayer) {
sum += bitrate;
}
}
return sum;
}
}
/** Settings for WebRTC quality based scaling. */
public class ScalingSettings {
public final boolean on;
public final Integer low;
public final Integer high;
/**
* Creates quality based scaling setting.
*
* @param on True if quality scaling is turned on.
*/
public ScalingSettings(boolean on) {
this.on = on;
this.low = null;
this.high = null;
}
/**
* Creates quality based scaling settings with custom thresholds.
*
* @param on True if quality scaling is turned on.
* @param low Average QP at which to scale up the resolution.
* @param high Average QP at which to scale down the resolution.
*/
public ScalingSettings(boolean on, int low, int high) {
this.on = on;
this.low = low;
this.high = high;
}
}
public interface Callback {
/** Call to return an encoded frame. */
void onEncodedFrame(EncodedImage frame, CodecSpecificInfo info);
}
/**
* Initializes the encoding process. Call before any calls to encode.
*/
VideoCodecStatus initEncode(Settings settings, Callback encodeCallback);
/**
* Releases the encoder. No more calls to encode will be made after this call.
*/
VideoCodecStatus release();
/**
* Requests the encoder to encode a frame.
*/
VideoCodecStatus encode(VideoFrame frame, EncodeInfo info);
/**
* Informs the encoder of the packet loss and the round-trip time of the network.
*
* @param packetLoss How many packets are lost on average per 255 packets.
* @param roundTripTimeMs Round-trip time of the network in milliseconds.
*/
VideoCodecStatus setChannelParameters(short packetLoss, long roundTripTimeMs);
/** Sets the bitrate allocation and the target framerate for the encoder. */
VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate);
/** Any encoder that wants to use WebRTC provided quality scaler must implement this method. */
ScalingSettings getScalingSettings();
/** Should return a descriptive name for the implementation. Gets called once and cached. */
String getImplementationName();
}
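
A small worked example of BitrateAllocation and ScalingSettings; the bitrates and QP thresholds below are illustrative values, not recommendations:

// Two spatial layers, each with three temporal layers; values in bits per second.
int[][] bitratesBps = {
    {150_000, 50_000, 50_000},   // spatial layer 0
    {400_000, 100_000, 100_000}, // spatial layer 1
};
VideoEncoder.BitrateAllocation allocation =
    new VideoEncoder.BitrateAllocation(bitratesBps);
int totalBps = allocation.getSum(); // 850_000, summed over all layers
VideoEncoder.ScalingSettings scaling =
    new VideoEncoder.ScalingSettings(true /* on */, 24 /* low */, 37 /* high */);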

View File

@ -0,0 +1,23 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Factory for creating VideoEncoders. */
public interface VideoEncoderFactory {
/** Creates an encoder for the given video codec. */
public VideoEncoder createEncoder(VideoCodecInfo info);
/**
* Enumerates the list of supported video codecs. This method will only be called once and the
* result will be cached.
*/
public VideoCodecInfo[] getSupportedCodecs();
}

View File

@ -0,0 +1,174 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.os.Handler;
import android.os.HandlerThread;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import java.util.ArrayList;
/**
* Can be used to save the video frames to file.
*/
public class VideoFileRenderer implements VideoRenderer.Callbacks {
static {
System.loadLibrary("jingle_peerconnection_so");
}
private static final String TAG = "VideoFileRenderer";
private final HandlerThread renderThread;
private final Object handlerLock = new Object();
private final Handler renderThreadHandler;
private final FileOutputStream videoOutFile;
private final String outputFileName;
private final int outputFileWidth;
private final int outputFileHeight;
private final int outputFrameSize;
private final ByteBuffer outputFrameBuffer;
private EglBase eglBase;
private YuvConverter yuvConverter;
private ArrayList<ByteBuffer> rawFrames = new ArrayList<>();
public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
final EglBase.Context sharedContext) throws IOException {
if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
throw new IllegalArgumentException("Does not support uneven width or height");
}
this.outputFileName = outputFile;
this.outputFileWidth = outputFileWidth;
this.outputFileHeight = outputFileHeight;
outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);
videoOutFile = new FileOutputStream(outputFile);
videoOutFile.write(
("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n")
.getBytes());
renderThread = new HandlerThread(TAG);
renderThread.start();
renderThreadHandler = new Handler(renderThread.getLooper());
ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, new Runnable() {
@Override
public void run() {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
yuvConverter = new YuvConverter();
}
});
}
@Override
public void renderFrame(final VideoRenderer.I420Frame frame) {
renderThreadHandler.post(new Runnable() {
@Override
public void run() {
renderFrameOnRenderThread(frame);
}
});
}
private void renderFrameOnRenderThread(VideoRenderer.I420Frame frame) {
final float frameAspectRatio = (float) frame.rotatedWidth() / (float) frame.rotatedHeight();
final float[] rotatedSamplingMatrix =
RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
false, frameAspectRatio, (float) outputFileWidth / outputFileHeight);
final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
try {
ByteBuffer buffer = nativeCreateNativeByteBuffer(outputFrameSize);
if (!frame.yuvFrame) {
yuvConverter.convert(outputFrameBuffer, outputFileWidth, outputFileHeight, outputFileWidth,
frame.textureId, texMatrix);
int stride = outputFileWidth;
byte[] data = outputFrameBuffer.array();
int offset = outputFrameBuffer.arrayOffset();
// Write Y
buffer.put(data, offset, outputFileWidth * outputFileHeight);
// Write U
for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
buffer.put(data, offset + r * stride, stride / 2);
}
// Write V
for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
buffer.put(data, offset + r * stride + stride / 2, stride / 2);
}
} else {
nativeI420Scale(frame.yuvPlanes[0], frame.yuvStrides[0], frame.yuvPlanes[1],
frame.yuvStrides[1], frame.yuvPlanes[2], frame.yuvStrides[2], frame.width, frame.height,
outputFrameBuffer, outputFileWidth, outputFileHeight);
buffer.put(outputFrameBuffer.array(), outputFrameBuffer.arrayOffset(), outputFrameSize);
}
buffer.rewind();
rawFrames.add(buffer);
} finally {
VideoRenderer.renderFrameDone(frame);
}
}
/**
* Release all resources. All already posted frames will be rendered first.
*/
public void release() {
final CountDownLatch cleanupBarrier = new CountDownLatch(1);
renderThreadHandler.post(new Runnable() {
@Override
public void run() {
yuvConverter.release();
eglBase.release();
renderThread.quit();
cleanupBarrier.countDown();
}
});
ThreadUtils.awaitUninterruptibly(cleanupBarrier);
try {
for (ByteBuffer buffer : rawFrames) {
videoOutFile.write("FRAME\n".getBytes());
byte[] data = new byte[outputFrameSize];
buffer.get(data);
videoOutFile.write(data);
nativeFreeNativeByteBuffer(buffer);
}
videoOutFile.close();
Logging.d(TAG, "Video written to disk as " + outputFileName + ". Number frames are "
+ rawFrames.size() + " and the dimension of the frames are " + outputFileWidth + "x"
+ outputFileHeight + ".");
} catch (IOException e) {
Logging.e(TAG, "Error writing video to disk", e);
}
}
public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU,
int strideU, ByteBuffer srcV, int strideV, int width, int height, ByteBuffer dst,
int dstWidth, int dstHeight);
public static native ByteBuffer nativeCreateNativeByteBuffer(int size);
public static native void nativeFreeNativeByteBuffer(ByteBuffer buffer);
}
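
A usage sketch for VideoFileRenderer; the output path and dimensions are illustrative, eglBase and videoTrack are created elsewhere, and the constructor's IOException must be handled by the caller:

VideoFileRenderer fileRenderer = new VideoFileRenderer(
    "/sdcard/recorded_call.y4m", 640, 480, eglBase.getEglBaseContext());
VideoRenderer renderer = new VideoRenderer(fileRenderer);
videoTrack.addRenderer(renderer);
// ... after the call ends:
videoTrack.removeRenderer(renderer); // also disposes the wrapper
fileRenderer.release(); // writes all buffered frames and closes the file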

View File

@ -0,0 +1,181 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Matrix;
import java.nio.ByteBuffer;
/**
* Java version of webrtc::VideoFrame and webrtc::VideoFrameBuffer. A difference from the C++
* version is that no explicit tag is used, and clients are expected to use 'instanceof' to find the
* right subclass of the buffer. This allows clients to create custom VideoFrame.Buffer in
* arbitrary format in their custom VideoSources, and then cast it back to the correct subclass in
* their custom VideoSinks. All implementations must also implement the toI420() function,
* converting from the underlying representation if necessary. I420 is the most widely accepted
* format and serves as a fallback for video sinks that can only handle I420, e.g. the internal
* WebRTC software encoders.
*/
public class VideoFrame {
public interface Buffer {
/**
* Resolution of the buffer in pixels.
*/
int getWidth();
int getHeight();
/**
* Returns a memory-backed frame in I420 format. If the pixel data is in another format, a
* conversion will take place. All implementations must provide a fallback to I420 for
* compatibility with e.g. the internal WebRTC software encoders.
*/
I420Buffer toI420();
/**
* Reference counting is needed since a video buffer can be shared between multiple VideoSinks,
* and the buffer needs to be returned to the VideoSource as soon as all references are gone.
*/
void retain();
void release();
/**
* Crops a region defined by |cropX|, |cropY|, |cropWidth| and |cropHeight|. Scales it to size
* |scaleWidth| x |scaleHeight|.
*/
Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);
}
/**
* Interface for I420 buffers.
*/
public interface I420Buffer extends Buffer {
ByteBuffer getDataY();
ByteBuffer getDataU();
ByteBuffer getDataV();
int getStrideY();
int getStrideU();
int getStrideV();
}
/**
* Interface for buffers that are stored as a single texture, either in OES or RGB format.
*/
public interface TextureBuffer extends Buffer {
enum Type { OES, RGB }
Type getType();
int getTextureId();
/**
* Retrieve the transform matrix associated with the frame. This transform matrix maps 2D
* homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to
* the coordinate that should be used to sample that location from the buffer.
*/
public Matrix getTransformMatrix();
}
private final Buffer buffer;
private final int rotation;
private final long timestampNs;
public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
if (buffer == null) {
throw new IllegalArgumentException("buffer not allowed to be null");
}
if (rotation % 90 != 0) {
throw new IllegalArgumentException("rotation must be a multiple of 90");
}
this.buffer = buffer;
this.rotation = rotation;
this.timestampNs = timestampNs;
}
public Buffer getBuffer() {
return buffer;
}
/**
* Rotation of the frame in degrees.
*/
public int getRotation() {
return rotation;
}
/**
* Timestamp of the frame in nanoseconds.
*/
public long getTimestampNs() {
return timestampNs;
}
public int getRotatedWidth() {
if (rotation % 180 == 0) {
return buffer.getWidth();
}
return buffer.getHeight();
}
public int getRotatedHeight() {
if (rotation % 180 == 0) {
return buffer.getHeight();
}
return buffer.getWidth();
}
/**
* Reference counting of the underlying buffer.
*/
public void retain() {
buffer.retain();
}
public void release() {
buffer.release();
}
public static VideoFrame.Buffer cropAndScaleI420(final I420Buffer buffer, int cropX, int cropY,
int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
if (cropWidth == scaleWidth && cropHeight == scaleHeight) {
// No scaling.
ByteBuffer dataY = buffer.getDataY();
ByteBuffer dataU = buffer.getDataU();
ByteBuffer dataV = buffer.getDataV();
dataY.position(cropX + cropY * buffer.getStrideY());
dataU.position(cropX / 2 + cropY / 2 * buffer.getStrideU());
dataV.position(cropX / 2 + cropY / 2 * buffer.getStrideV());
buffer.retain();
return new I420BufferImpl(buffer.getWidth(), buffer.getHeight(), dataY.slice(),
buffer.getStrideY(), dataU.slice(), buffer.getStrideU(), dataV.slice(),
buffer.getStrideV(), new Runnable() {
@Override
public void run() {
buffer.release();
}
});
}
I420BufferImpl newBuffer = I420BufferImpl.allocate(scaleWidth, scaleHeight);
nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
scaleHeight);
return newBuffer;
}
private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);
}
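
A short example of the rotation bookkeeping: a 1280x720 buffer delivered with a 90-degree rotation presents as 720x1280. Here |buffer| and |timestampNs| are assumed to come from a capturer:

VideoFrame frame = new VideoFrame(buffer, 90 /* rotation */, timestampNs);
int displayWidth = frame.getRotatedWidth();   // 720, i.e. buffer.getHeight()
int displayHeight = frame.getRotatedHeight(); // 1280, i.e. buffer.getWidth()
frame.retain(); // keep the underlying buffer alive beyond this scope
// ... later, when the last user is done with it:
frame.release();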

View File

@ -0,0 +1,227 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Matrix;
import android.graphics.Point;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
/**
* Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or
* drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
* taken into account. You can supply an additional render matrix for custom transformations.
*/
public class VideoFrameDrawer {
/**
* Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
* depending on the type of the buffer. You can supply an additional render matrix, which is
* multiplied with the transformation matrix of the frame (M = renderMatrix *
* transformationMatrix).
*/
static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY,
int viewportWidth, int viewportHeight) {
Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
finalMatrix.preConcat(renderMatrix);
float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);
switch (buffer.getType()) {
case OES:
drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
viewportY, viewportWidth, viewportHeight);
break;
case RGB:
drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
viewportY, viewportWidth, viewportHeight);
break;
default:
throw new RuntimeException("Unknown texture type.");
}
}
/**
* Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This
* class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
*/
private static class YuvUploader {
// Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
// TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
// that handles stride and compare performance with intermediate copy.
private ByteBuffer copyBuffer;
private int[] yuvTextures;
/**
* Upload |planes| into OpenGL textures, taking stride into consideration.
*
* @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
*/
public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
final int[] planeWidths = new int[] {width, width / 2, width / 2};
final int[] planeHeights = new int[] {height, height / 2, height / 2};
// Make a first pass to see if we need a temporary copy buffer.
int copyCapacityNeeded = 0;
for (int i = 0; i < 3; ++i) {
if (strides[i] > planeWidths[i]) {
copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
}
}
// Allocate copy buffer if necessary.
if (copyCapacityNeeded > 0
&& (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
}
// Make sure YUV textures are allocated.
if (yuvTextures == null) {
yuvTextures = new int[3];
for (int i = 0; i < 3; i++) {
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
}
// Upload each plane.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
// GLES only accepts packed data, i.e. stride == planeWidth.
final ByteBuffer packedByteBuffer;
if (strides[i] == planeWidths[i]) {
// Input is packed already.
packedByteBuffer = planes[i];
} else {
VideoRenderer.nativeCopyPlane(
planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
packedByteBuffer = copyBuffer;
}
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
}
return yuvTextures;
}
public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) {
int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()};
ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()};
return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes);
}
public int[] getYuvTextures() {
return yuvTextures;
}
/**
* Releases cached resources. Uploader can still be used and the resources will be reallocated
* on first use.
*/
public void release() {
copyBuffer = null;
if (yuvTextures != null) {
GLES20.glDeleteTextures(3, yuvTextures, 0);
yuvTextures = null;
}
}
}
private static int distance(float x0, float y0, float x1, float y1) {
return (int) Math.round(Math.hypot(x1 - x0, y1 - y0));
}
// These points are used to calculate the size of the part of the frame we are rendering.
final static float[] srcPoints =
new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */};
private final float[] dstPoints = new float[6];
private final Point renderSize = new Point();
private int renderWidth;
private int renderHeight;
// Calculate the frame size after |renderMatrix| is applied. Stores the output in member variables
// |renderWidth| and |renderHeight| to avoid allocations since this function is called for every
// frame.
private void calculateTransformedRenderSize(
int frameWidth, int frameHeight, Matrix renderMatrix) {
if (renderMatrix == null) {
renderWidth = frameWidth;
renderHeight = frameHeight;
return;
}
// Transform the texture coordinates (in the range [0, 1]) according to |renderMatrix|.
renderMatrix.mapPoints(dstPoints, srcPoints);
// Multiply with the width and height to get the positions in terms of pixels.
for (int i = 0; i < 3; ++i) {
dstPoints[i * 2 + 0] *= frameWidth;
dstPoints[i * 2 + 1] *= frameHeight;
}
// Get the length of the sides of the transformed rectangle in terms of pixels.
renderWidth = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]);
renderHeight = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]);
}
private final YuvUploader yuvUploader = new YuvUploader();
// This variable will only be used for checking reference equality and is used for caching I420
// textures.
private VideoFrame lastI420Frame;
private final Matrix renderMatrix = new Matrix();
public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) {
drawFrame(frame, drawer, null /* additionalRenderMatrix */);
}
public void drawFrame(
VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) {
drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
frame.getRotatedWidth(), frame.getRotatedHeight());
}
public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
int viewportHeight) {
final int width = frame.getRotatedWidth();
final int height = frame.getRotatedHeight();
calculateTransformedRenderSize(width, height, additionalRenderMatrix);
final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer;
renderMatrix.reset();
renderMatrix.preTranslate(0.5f, 0.5f);
if (!isTextureFrame) {
renderMatrix.preScale(1f, -1f); // I420-frames are upside down
}
renderMatrix.preRotate(frame.getRotation());
renderMatrix.preTranslate(-0.5f, -0.5f);
if (additionalRenderMatrix != null) {
renderMatrix.preConcat(additionalRenderMatrix);
}
if (isTextureFrame) {
lastI420Frame = null;
drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth,
renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
} else {
// Only upload the I420 data to textures once per frame, if we are called multiple times
// with the same frame.
if (frame != lastI420Frame) {
lastI420Frame = frame;
final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420();
yuvUploader.uploadFromBuffer(i420Buffer);
i420Buffer.release();
}
drawer.drawYuv(yuvUploader.getYuvTextures(),
RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth,
renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
}
}
public void release() {
yuvUploader.release();
lastI420Frame = null;
}
}
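
A render-thread sketch that draws one frame into the current EGL surface; GlRectDrawer is the stock GlDrawer from this package, and |frame| is assumed to be a valid VideoFrame:

VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
RendererCommon.GlDrawer glDrawer = new GlRectDrawer();
// Must be called on a thread with a current EGL context:
frameDrawer.drawFrame(frame, glDrawer, null /* additionalRenderMatrix */);
// On teardown, with the EGL context still current:
frameDrawer.release();
glDrawer.release();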

View File

@ -0,0 +1,223 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/**
* Java version of VideoSinkInterface. Allows clients to define their own
* rendering behavior by passing in a Callbacks object.
*/
public class VideoRenderer {
/**
* Java version of webrtc::VideoFrame. Frames are only constructed from native code and test
* code.
*/
public static class I420Frame {
public final int width;
public final int height;
public final int[] yuvStrides;
public ByteBuffer[] yuvPlanes;
public final boolean yuvFrame;
// Matrix that transforms standard coordinates to their proper sampling locations in
// the texture. This transform compensates for any properties of the video source that
// cause it to appear different from a normalized texture. This matrix does not take
// |rotationDegree| into account.
public final float[] samplingMatrix;
public int textureId;
// Frame pointer in C++.
private long nativeFramePointer;
// rotationDegree is the number of degrees that the frame must be rotated
// clockwise to be rendered correctly.
public int rotationDegree;
// If this I420Frame was constructed from VideoFrame.Buffer, this points to
// the backing buffer.
private final VideoFrame.Buffer backingBuffer;
/**
* Construct a frame of the given dimensions with the specified planar data.
*/
public I420Frame(int width, int height, int rotationDegree, int[] yuvStrides,
ByteBuffer[] yuvPlanes, long nativeFramePointer) {
this.width = width;
this.height = height;
this.yuvStrides = yuvStrides;
this.yuvPlanes = yuvPlanes;
this.yuvFrame = true;
this.rotationDegree = rotationDegree;
this.nativeFramePointer = nativeFramePointer;
backingBuffer = null;
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
// bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
// matrix.
samplingMatrix = RendererCommon.verticalFlipMatrix();
}
/**
* Construct a texture frame of the given dimensions with data in SurfaceTexture
*/
public I420Frame(int width, int height, int rotationDegree, int textureId,
float[] samplingMatrix, long nativeFramePointer) {
this.width = width;
this.height = height;
this.yuvStrides = null;
this.yuvPlanes = null;
this.samplingMatrix = samplingMatrix;
this.textureId = textureId;
this.yuvFrame = false;
this.rotationDegree = rotationDegree;
this.nativeFramePointer = nativeFramePointer;
backingBuffer = null;
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
}
/**
* Construct a frame from VideoFrame.Buffer.
*/
public I420Frame(int rotationDegree, VideoFrame.Buffer buffer, long nativeFramePointer) {
this.width = buffer.getWidth();
this.height = buffer.getHeight();
this.rotationDegree = rotationDegree;
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
if (buffer instanceof VideoFrame.TextureBuffer
&& ((VideoFrame.TextureBuffer) buffer).getType() == VideoFrame.TextureBuffer.Type.OES) {
VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer;
this.yuvFrame = false;
this.textureId = textureBuffer.getTextureId();
this.samplingMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(
textureBuffer.getTransformMatrix());
this.yuvStrides = null;
this.yuvPlanes = null;
} else if (buffer instanceof VideoFrame.I420Buffer) {
VideoFrame.I420Buffer i420Buffer = (VideoFrame.I420Buffer) buffer;
this.yuvFrame = true;
this.yuvStrides =
new int[] {i420Buffer.getStrideY(), i420Buffer.getStrideU(), i420Buffer.getStrideV()};
this.yuvPlanes =
new ByteBuffer[] {i420Buffer.getDataY(), i420Buffer.getDataU(), i420Buffer.getDataV()};
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
// bottom-left corner. This discrepancy is corrected by multiplying the sampling matrix with
// a vertical flip matrix.
this.samplingMatrix = RendererCommon.verticalFlipMatrix();
this.textureId = 0;
} else {
this.yuvFrame = false;
this.textureId = 0;
this.samplingMatrix = null;
this.yuvStrides = null;
this.yuvPlanes = null;
}
this.nativeFramePointer = nativeFramePointer;
backingBuffer = buffer;
}
public int rotatedWidth() {
return (rotationDegree % 180 == 0) ? width : height;
}
public int rotatedHeight() {
return (rotationDegree % 180 == 0) ? height : width;
}
@Override
public String toString() {
final String type = yuvFrame
? "Y: " + yuvStrides[0] + ", U: " + yuvStrides[1] + ", V: " + yuvStrides[2]
: "Texture: " + textureId;
return width + "x" + height + ", " + type;
}
/**
* Convert the frame to VideoFrame. It is no longer safe to use the I420Frame after calling
* this.
*/
VideoFrame toVideoFrame() {
final VideoFrame.Buffer buffer;
if (backingBuffer != null) {
// We were constructed from a VideoFrame.Buffer, just return it.
// Make sure webrtc::VideoFrame object is released.
backingBuffer.retain();
VideoRenderer.renderFrameDone(this);
buffer = backingBuffer;
} else if (yuvFrame) {
buffer = new I420BufferImpl(width, height, yuvPlanes[0], yuvStrides[0], yuvPlanes[1],
yuvStrides[1], yuvPlanes[2], yuvStrides[2],
() -> { VideoRenderer.renderFrameDone(this); });
} else {
// Note: surfaceTextureHelper being null means calling toI420 will crash.
buffer = new TextureBufferImpl(width, height, VideoFrame.TextureBuffer.Type.OES, textureId,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(samplingMatrix),
null /* surfaceTextureHelper */, () -> { VideoRenderer.renderFrameDone(this); });
}
return new VideoFrame(buffer, rotationDegree, 0 /* timestampNs */);
}
}
// Helper native function to do a video frame plane copying.
public static native void nativeCopyPlane(
ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);
/** The real meat of VideoSinkInterface. */
public static interface Callbacks {
// |frame| might have pending rotation and implementation of Callbacks
// should handle that by applying rotation during rendering. The callee
// is responsible for signaling when it is done with |frame| by calling
// renderFrameDone(frame).
public void renderFrame(I420Frame frame);
}
/**
* This must be called after every renderFrame() to release the frame.
*/
public static void renderFrameDone(I420Frame frame) {
frame.yuvPlanes = null;
frame.textureId = 0;
if (frame.nativeFramePointer != 0) {
releaseNativeFrame(frame.nativeFramePointer);
frame.nativeFramePointer = 0;
}
}
long nativeVideoRenderer;
public VideoRenderer(Callbacks callbacks) {
nativeVideoRenderer = nativeWrapVideoRenderer(callbacks);
}
public void dispose() {
if (nativeVideoRenderer == 0) {
// Already disposed.
return;
}
freeWrappedVideoRenderer(nativeVideoRenderer);
nativeVideoRenderer = 0;
}
private static native long nativeWrapVideoRenderer(Callbacks callbacks);
private static native void freeWrappedVideoRenderer(long nativeVideoRenderer);
private static native void releaseNativeFrame(long nativeFramePointer);
}
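
A Callbacks sketch illustrating the renderFrameDone() contract: every delivered frame must be returned exactly once, even if rendering fails:

VideoRenderer videoRenderer = new VideoRenderer(new VideoRenderer.Callbacks() {
  @Override
  public void renderFrame(VideoRenderer.I420Frame frame) {
    try {
      // Draw the frame, applying frame.rotationDegree; check frame.yuvFrame
      // to choose between the YUV-plane path and the texture path.
    } finally {
      VideoRenderer.renderFrameDone(frame);
    }
  }
});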

View File

@ -0,0 +1,23 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Java version of rtc::VideoSinkInterface.
*/
public interface VideoSink {
/**
* Implementations should call frame.retain() if they need to hold a reference to the frame after
* this function returns. Each call to retain() should be followed by a call to frame.release()
* when the reference is no longer needed.
*/
void onFrame(VideoFrame frame);
}
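
A minimal sink sketch; FrameCountingSink is a hypothetical name, and no retain() is needed because the frame is not used after onFrame() returns:

class FrameCountingSink implements VideoSink {
  private int frameCount;

  @Override
  public void onFrame(VideoFrame frame) {
    frameCount++; // inspect the frame here; retain()/release() only if kept longer
  }
}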

View File

@ -0,0 +1,33 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Java wrapper of native AndroidVideoTrackSource.
*/
public class VideoSource extends MediaSource {
public VideoSource(long nativeSource) {
super(nativeSource);
}
/**
* Calling this function will cause frames to be scaled down to the requested resolution. Also,
* frames will be cropped to match the requested aspect ratio, and frames will be dropped to match
* the requested fps. The requested aspect ratio is orientation agnostic and will be adjusted to
* maintain the input orientation, so it doesn't matter if e.g. 1280x720 or 720x1280 is requested.
*/
public void adaptOutputFormat(int width, int height, int fps) {
nativeAdaptOutputFormat(nativeSource, width, height, fps);
}
private static native void nativeAdaptOutputFormat(
long nativeSource, int width, int height, int fps);
}
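
An adaptation sketch, assuming videoSource was obtained elsewhere (e.g. from a PeerConnectionFactory): cap delivery at 15 fps and scale/crop toward 640x360. Since the request is orientation agnostic, portrait input is adapted toward 360x640 instead:

videoSource.adaptOutputFormat(640, 360, 15 /* fps */);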

View File

@ -0,0 +1,82 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.IdentityHashMap;
import java.util.LinkedList;
/** Java version of VideoTrackInterface. */
public class VideoTrack extends MediaStreamTrack {
private final LinkedList<VideoRenderer> renderers = new LinkedList<VideoRenderer>();
private final IdentityHashMap<VideoSink, Long> sinks = new IdentityHashMap<VideoSink, Long>();
public VideoTrack(long nativeTrack) {
super(nativeTrack);
}
/**
* Adds a VideoSink to the track.
*
* A track can have any number of VideoSinks. VideoSinks are intended to replace
* renderers. However, converting old-style texture frames involves a costly
* conversion to I420, so upgrading is not recommended until all of your
* sources produce VideoFrames.
*/
public void addSink(VideoSink sink) {
final long nativeSink = nativeWrapSink(sink);
sinks.put(sink, nativeSink);
nativeAddSink(nativeTrack, nativeSink);
}
/**
* Removes a VideoSink from the track.
*
* If the VideoSink was not attached to the track, this is a no-op.
*/
public void removeSink(VideoSink sink) {
// Use Long rather than long so that a sink that was never added does not
// trigger a NullPointerException when unboxing the result of remove().
final Long nativeSink = sinks.remove(sink);
if (nativeSink != null) {
nativeRemoveSink(nativeTrack, nativeSink);
nativeFreeSink(nativeSink);
}
}
public void addRenderer(VideoRenderer renderer) {
renderers.add(renderer);
nativeAddSink(nativeTrack, renderer.nativeVideoRenderer);
}
public void removeRenderer(VideoRenderer renderer) {
if (!renderers.remove(renderer)) {
return;
}
nativeRemoveSink(nativeTrack, renderer.nativeVideoRenderer);
renderer.dispose();
}
public void dispose() {
while (!renderers.isEmpty()) {
removeRenderer(renderers.getFirst());
}
for (long nativeSink : sinks.values()) {
nativeRemoveSink(nativeTrack, nativeSink);
nativeFreeSink(nativeSink);
}
sinks.clear();
super.dispose();
}
private static native void nativeAddSink(long nativeTrack, long nativeSink);
private static native void nativeRemoveSink(long nativeTrack, long nativeSink);
private static native long nativeWrapSink(VideoSink sink);
private static native void nativeFreeSink(long nativeSink);
}
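
A sink-management sketch; |track| stands in for a VideoTrack obtained elsewhere, and the lambda is a trivial VideoSink:

VideoSink sink = (VideoFrame frame) -> {
  // Consume the frame; call frame.retain() only if it outlives this call.
};
track.addSink(sink);
// ...
track.removeSink(sink); // a no-op if the sink was never added
track.dispose();        // detaches and frees any remaining sinks and renderers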