Format all Java in WebRTC.

BUG=webrtc:6419
TBR=henrika@webrtc.org

Review-Url: https://codereview.webrtc.org/2377003002
Cr-Commit-Position: refs/heads/master@{#14432}
Author: sakal, 2016-09-29 04:12:44 -07:00 (committed by Commit bot)
Commit: b6760f9e44, parent: a48ddb7636
88 changed files with 1555 additions and 2071 deletions
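
The change itself is mechanical: each hunk below rewraps hand-aligned argument lists, operator breaks, and array initializers into the clang-format style, and adds // clang-format off / on guards around blocks whose manual layout should survive (shader strings, EGL config attribute arrays). A rough sketch of how a tree-wide pass like this can be reproduced, assuming a clang-format new enough to understand Java (3.6 or later) on the PATH, a .clang-format config in a parent directory, and a webrtc/ source root; the actual tooling used for this CL is not part of the commit:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Hypothetical helper, not part of this CL: run clang-format in place over
// every Java source file under the (assumed) webrtc/ tree.
public class FormatAllJava {
  public static void main(String[] args) throws IOException, InterruptedException {
    List<Path> javaFiles;
    try (Stream<Path> tree = Files.walk(Paths.get("webrtc"))) {
      javaFiles = tree.filter(p -> p.toString().endsWith(".java")).collect(Collectors.toList());
    }
    for (Path file : javaFiles) {
      // -style=file picks up the nearest .clang-format; -i rewrites the file in place.
      new ProcessBuilder("clang-format", "-style=file", "-i", file.toString())
          .inheritIO()
          .start()
          .waitFor();
    }
  }
}

Running such a pass and committing the result yields a change of exactly this shape: wrapping and whitespace only, no behavioral edits.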


@ -33,8 +33,7 @@ public class CallSessionFileRotatingLogSink {
}
}
private static native long nativeAddSink(
String dirPath, int maxFileSize, int severity);
private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
private static native void nativeDeleteSink(long nativeSink);
private static native byte[] nativeGetLogData(String dirPath);
}


@ -19,20 +19,20 @@ import java.util.List;
public class Camera1Capturer extends CameraCapturer {
private final boolean captureToTexture;
public Camera1Capturer(String cameraName, CameraEventsHandler eventsHandler,
boolean captureToTexture) {
public Camera1Capturer(
String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
this.captureToTexture = captureToTexture;
}
@Override
protected void createCameraSession(
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
String cameraName, int width, int height, int framerate) {
Camera1Session.create(
createSessionCallback, events, captureToTexture, applicationContext, surfaceTextureHelper,
Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
int framerate) {
Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
framerate);
}
}


@ -70,8 +70,8 @@ public class Camera1Enumerator implements CameraEnumerator {
}
@Override
public CameraVideoCapturer createCapturer(String deviceName,
CameraVideoCapturer.CameraEventsHandler eventsHandler) {
public CameraVideoCapturer createCapturer(
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new VideoCapturerAndroid(deviceName, eventsHandler, captureToTexture);
}
@ -135,7 +135,7 @@ public class Camera1Enumerator implements CameraEnumerator {
final long endTimeMs = SystemClock.elapsedRealtime();
Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
return formatList;
}
@ -181,7 +181,6 @@ public class Camera1Enumerator implements CameraEnumerator {
String facing =
(info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
return "Camera " + index + ", Facing " + facing
+ ", Orientation " + info.orientation;
return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
}
}


@ -34,7 +34,7 @@ public class Camera1Session implements CameraSession {
private static final Histogram camera1StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
private static enum SessionState { RUNNING, STOPPED };
private static enum SessionState { RUNNING, STOPPED }
private final Handler cameraThreadHandler;
private final Events events;
@ -54,11 +54,10 @@ public class Camera1Session implements CameraSession {
private SessionState state;
private boolean firstFrameReported = false;
public static void create(
final CreateSessionCallback callback, final Events events,
public static void create(final CreateSessionCallback callback, final Events events,
final boolean captureToTexture, final Context applicationContext,
final SurfaceTextureHelper surfaceTextureHelper,
final int cameraId, final int width, final int height, final int framerate) {
final SurfaceTextureHelper surfaceTextureHelper, final int cameraId, final int width,
final int height, final int framerate) {
final long constructionTimeNs = System.nanoTime();
Logging.d(TAG, "Open camera " + cameraId);
events.onCameraOpening();
@ -83,8 +82,8 @@ public class Camera1Session implements CameraSession {
android.hardware.Camera.getCameraInfo(cameraId, info);
final android.hardware.Camera.Parameters parameters = camera.getParameters();
final CaptureFormat captureFormat = findClosestCaptureFormat(
parameters, width, height, framerate);
final CaptureFormat captureFormat =
findClosestCaptureFormat(parameters, width, height, framerate);
final Size pictureSize = findClosestPictureSize(parameters, width, height);
updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
@ -101,10 +100,9 @@ public class Camera1Session implements CameraSession {
// Calculate orientation manually and send it as CVO insted.
camera.setDisplayOrientation(0 /* degrees */);
callback.onDone(new Camera1Session(
events, captureToTexture, applicationContext, surfaceTextureHelper,
cameraId, width, height, framerate,
camera, info, captureFormat, constructionTimeNs));
callback.onDone(
new Camera1Session(events, captureToTexture, applicationContext, surfaceTextureHelper,
cameraId, width, height, framerate, camera, info, captureFormat, constructionTimeNs));
}
private static void updateCameraParameters(android.hardware.Camera camera,
@ -136,27 +134,22 @@ public class Camera1Session implements CameraSession {
Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
final CaptureFormat.FramerateRange fpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(
supportedFramerates, framerate);
CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()),
width, height);
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
}
private static Size findClosestPictureSize(android.hardware.Camera.Parameters parameters,
int width, int height) {
private static Size findClosestPictureSize(
android.hardware.Camera.Parameters parameters, int width, int height) {
return CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()),
width, height);
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
}
private Camera1Session(
Events events, boolean captureToTexture,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
int cameraId, int width, int height, int framerate,
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, int cameraId, int width, int height, int framerate,
android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
CaptureFormat captureFormat, long constructionTimeNs) {
Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
@ -186,8 +179,7 @@ public class Camera1Session implements CameraSession {
final long stopStartTime = System.nanoTime();
state = SessionState.STOPPED;
stopInternal();
final int stopTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera1StopTimeMsHistogram.addSample(stopTimeMs);
}
}
@ -312,9 +304,8 @@ public class Camera1Session implements CameraSession {
private int getDeviceOrientation() {
int orientation = 0;
WindowManager wm = (WindowManager) applicationContext.getSystemService(
Context.WINDOW_SERVICE);
switch(wm.getDefaultDisplay().getRotation()) {
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;


@ -27,14 +27,11 @@ public class Camera2Capturer extends CameraCapturer {
}
@Override
protected void createCameraSession(
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
String cameraName, int width, int height, int framerate) {
Camera2Session.create(
createSessionCallback, events,
applicationContext, cameraManager,
surfaceTextureHelper,
cameraName, width, height, framerate);
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
int framerate) {
Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
surfaceTextureHelper, cameraName, width, height, framerate);
}
}


@ -63,22 +63,20 @@ public class Camera2Enumerator implements CameraEnumerator {
@Override
public boolean isFrontFacing(String deviceName) {
CameraCharacteristics characteristics
= getCameraCharacteristics(deviceName);
CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
return characteristics != null
&& characteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_FRONT;
== CameraMetadata.LENS_FACING_FRONT;
}
@Override
public boolean isBackFacing(String deviceName) {
CameraCharacteristics characteristics
= getCameraCharacteristics(deviceName);
CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
return characteristics != null
&& characteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_BACK;
== CameraMetadata.LENS_FACING_BACK;
}
@Override
@ -87,8 +85,8 @@ public class Camera2Enumerator implements CameraEnumerator {
}
@Override
public CameraVideoCapturer createCapturer(String deviceName,
CameraVideoCapturer.CameraEventsHandler eventsHandler) {
public CameraVideoCapturer createCapturer(
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new Camera2Capturer(context, deviceName, eventsHandler);
}
@ -132,9 +130,9 @@ public class Camera2Enumerator implements CameraEnumerator {
return false;
}
}
// On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
// On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
} catch (/* CameraAccessException */ AndroidException e) {
Logging.e(TAG, "Camera access exception: " + e);
return false;
@ -149,10 +147,9 @@ public class Camera2Enumerator implements CameraEnumerator {
return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
}
static List<Size> getSupportedSizes(
CameraCharacteristics cameraCharacteristics) {
static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
final StreamConfigurationMap streamMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
final int supportLevel =
cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
@ -161,8 +158,8 @@ public class Camera2Enumerator implements CameraEnumerator {
// Video may be stretched pre LMR1 on legacy implementations.
// Filter out formats that have different aspect ratio than the sensor array.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1 &&
supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
&& supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
final Rect activeArraySize =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
final ArrayList<Size> filteredSizes = new ArrayList<Size>();
@ -184,8 +181,7 @@ public class Camera2Enumerator implements CameraEnumerator {
(CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
}
static List<CaptureFormat> getSupportedFormats(
CameraManager cameraManager, String cameraId) {
static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
synchronized (cachedSupportedFormats) {
if (cachedSupportedFormats.containsKey(cameraId)) {
return cachedSupportedFormats.get(cameraId);
@ -220,8 +216,8 @@ public class Camera2Enumerator implements CameraEnumerator {
for (Size size : sizes) {
long minFrameDurationNs = 0;
try {
minFrameDurationNs = streamMap.getOutputMinFrameDuration(SurfaceTexture.class,
new android.util.Size(size.width, size.height));
minFrameDurationNs = streamMap.getOutputMinFrameDuration(
SurfaceTexture.class, new android.util.Size(size.width, size.height));
} catch (Exception e) {
// getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
}
@ -235,7 +231,7 @@ public class Camera2Enumerator implements CameraEnumerator {
cachedSupportedFormats.put(cameraId, formatList);
final long endTimeMs = SystemClock.elapsedRealtime();
Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
return formatList;
}
}
@ -255,8 +251,7 @@ public class Camera2Enumerator implements CameraEnumerator {
final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
for (Range<Integer> range : arrayRanges) {
ranges.add(new CaptureFormat.FramerateRange(
range.getLower() * unitFactor,
range.getUpper() * unitFactor));
range.getLower() * unitFactor, range.getUpper() * unitFactor));
}
return ranges;
}


@ -42,7 +42,7 @@ public class Camera2Session implements CameraSession {
private static final Histogram camera2StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
private static enum SessionState { RUNNING, STOPPED };
private static enum SessionState { RUNNING, STOPPED }
private final Handler cameraThreadHandler;
private final CreateSessionCallback callback;
@ -159,11 +159,11 @@ public class Camera2Session implements CameraSession {
final CaptureRequest.Builder captureRequestBuilder =
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// Set auto exposure fps range.
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(
captureFormat.framerate.min / fpsUnitFactor,
captureFormat.framerate.max / fpsUnitFactor));
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
captureFormat.framerate.max / fpsUnitFactor));
captureRequestBuilder.set(
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
chooseStabilizationMode(captureRequestBuilder);
chooseFocusMode(captureRequestBuilder);
@ -205,8 +205,8 @@ public class Camera2Session implements CameraSession {
}
// Undo camera orientation - we report it as rotation instead.
transformMatrix = RendererCommon.rotateTextureMatrix(
transformMatrix, -cameraOrientation);
transformMatrix =
RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
@ -250,12 +250,12 @@ public class Camera2Session implements CameraSession {
}
private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
final int[] availableFocusModes = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
final int[] availableFocusModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
for (int mode : availableFocusModes) {
if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
captureRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
Logging.d(TAG, "Using continuous video auto-focus.");
return;
}
@ -272,23 +272,17 @@ public class Camera2Session implements CameraSession {
}
}
public static void create(
CreateSessionCallback callback, Events events,
public static void create(CreateSessionCallback callback, Events events,
Context applicationContext, CameraManager cameraManager,
SurfaceTextureHelper surfaceTextureHelper,
String cameraId, int width, int height, int framerate) {
new Camera2Session(
callback, events,
applicationContext, cameraManager,
surfaceTextureHelper,
SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
int framerate) {
new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
cameraId, width, height, framerate);
}
private Camera2Session(
CreateSessionCallback callback, Events events,
Context applicationContext, CameraManager cameraManager,
SurfaceTextureHelper surfaceTextureHelper,
String cameraId, int width, int height, int framerate) {
private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
int width, int height, int framerate) {
Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
constructionTimeNs = System.nanoTime();
@ -341,11 +335,9 @@ public class Camera2Session implements CameraSession {
}
final CaptureFormat.FramerateRange bestFpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(
framerateRanges, framerate);
CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(
sizes, width, height);
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
Logging.d(TAG, "Using capture format: " + captureFormat);
@ -372,8 +364,7 @@ public class Camera2Session implements CameraSession {
final long stopStartTime = System.nanoTime();
state = SessionState.STOPPED;
stopInternal();
final int stopTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera2StopTimeMsHistogram.addSample(stopTimeMs);
}
}
@ -417,9 +408,8 @@ public class Camera2Session implements CameraSession {
private int getDeviceOrientation() {
int orientation = 0;
WindowManager wm = (WindowManager) applicationContext.getSystemService(
Context.WINDOW_SERVICE);
switch(wm.getDefaultDisplay().getRotation()) {
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;


@ -19,8 +19,8 @@ import java.util.Arrays;
@SuppressWarnings("deprecation")
public abstract class CameraCapturer implements CameraVideoCapturer {
enum SwitchState {
IDLE, // No switch requested.
PENDING, // Waiting for previous capture session to open.
IDLE, // No switch requested.
PENDING, // Waiting for previous capture session to open.
IN_PROGRESS, // Waiting for new switched capture session to start.
}
@ -50,8 +50,7 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
if (switchState == SwitchState.IN_PROGRESS) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchDone(
cameraEnumerator.isFrontFacing(cameraName));
switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
switchEventsHandler = null;
}
switchState = SwitchState.IDLE;
@ -133,8 +132,7 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
@Override
public void onByteBufferFrameCaptured(
CameraSession session, byte[] data, int width, int height, int rotation,
long timestamp) {
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
@ -151,9 +149,8 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
}
@Override
public void onTextureFrameCaptured(
CameraSession session, int width, int height, int oesTextureId, float[] transformMatrix,
int rotation, long timestamp) {
public void onTextureFrameCaptured(CameraSession session, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
@ -187,18 +184,18 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
private SurfaceTextureHelper surfaceHelper;
private final Object stateLock = new Object();
private boolean sessionOpening; /* guarded by stateLock */
private CameraSession currentSession; /* guarded by stateLock */
private String cameraName; /* guarded by stateLock */
private int width; /* guarded by stateLock */
private int height; /* guarded by stateLock */
private int framerate; /* guarded by stateLock */
private int openAttemptsRemaining; /* guarded by stateLock */
private boolean sessionOpening; /* guarded by stateLock */
private CameraSession currentSession; /* guarded by stateLock */
private String cameraName; /* guarded by stateLock */
private int width; /* guarded by stateLock */
private int height; /* guarded by stateLock */
private int framerate; /* guarded by stateLock */
private int openAttemptsRemaining; /* guarded by stateLock */
private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
// Valid from onDone call until stopCapture, otherwise null.
private CameraStatistics cameraStatistics; /* guarded by stateLock */
private boolean firstFrameObserved; /* guarded by stateLock */
private CameraStatistics cameraStatistics; /* guarded by stateLock */
private boolean firstFrameObserved; /* guarded by stateLock */
public CameraCapturer(
String cameraName, CameraEventsHandler eventsHandler, CameraEnumerator cameraEnumerator) {
@ -268,9 +265,8 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
cameraThreadHandler.postDelayed(new Runnable() {
@Override
public void run() {
createCameraSession(
createSessionCallback, cameraSessionEventsHandler, applicationContext, surfaceHelper,
cameraName, width, height, framerate);
createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
surfaceHelper, cameraName, width, height, framerate);
}
}, delayMs);
}
@ -285,7 +281,6 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
ThreadUtils.waitUninterruptibly(stateLock);
}
if (currentSession != null) {
Logging.d(TAG, "Stop capture: Nulling session");
cameraStatistics.release();
@ -428,6 +423,6 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
abstract protected void createCameraSession(
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
String cameraName, int width, int height, int framerate);
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
int width, int height, int framerate);
}


@ -123,7 +123,6 @@ public class CameraEnumerationAndroid {
return new Camera1Enumerator().getDeviceNames();
}
/**
* @deprecated
* Please use Camera1Enumerator.getDeviceNames().length instead.
@ -177,8 +176,8 @@ public class CameraEnumerationAndroid {
// lower bound, to allow the framerate to fluctuate based on lightning conditions.
public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
return Collections.min(supportedFramerates,
new ClosestComparator<CaptureFormat.FramerateRange>() {
return Collections.min(
supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
// Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
// from requested.
private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
@ -192,32 +191,29 @@ public class CameraEnumerationAndroid {
// Use one weight for small |value| less than |threshold|, and another weight above.
private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
return (value < threshold)
? value * lowWeight
: threshold * lowWeight + (value - threshold) * highWeight;
return (value < threshold) ? value * lowWeight
: threshold * lowWeight + (value - threshold) * highWeight;
}
@Override
int diff(CaptureFormat.FramerateRange range) {
final int minFpsError = progressivePenalty(range.min,
MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
final int minFpsError = progressivePenalty(
range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
return minFpsError + maxFpsError;
}
});
});
}
public static Size getClosestSupportedSize(
List<Size> supportedSizes, final int requestedWidth,
final int requestedHeight) {
return Collections.min(supportedSizes,
new ClosestComparator<Size>() {
@Override
int diff(Size size) {
return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
}
});
List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
return Collections.min(supportedSizes, new ClosestComparator<Size>() {
@Override
int diff(Size size) {
return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
}
});
}
private static String getNameOfDevice(int facing) {


@ -20,6 +20,6 @@ public interface CameraEnumerator {
public boolean isBackFacing(String deviceName);
public List<CaptureFormat> getSupportedFormats(String deviceName);
public CameraVideoCapturer createCapturer(String deviceName,
CameraVideoCapturer.CameraEventsHandler eventsHandler);
public CameraVideoCapturer createCapturer(
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
}


@ -22,10 +22,10 @@ public interface CameraSession {
void onCameraOpening();
void onCameraError(CameraSession session, String error);
void onCameraClosed(CameraSession session);
void onByteBufferFrameCaptured(CameraSession session, byte[] data, int width, int height,
int rotation, long timestamp);
void onByteBufferFrameCaptured(
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp);
float[] transformMatrix, int rotation, long timestamp);
}
/**


@ -75,7 +75,7 @@ public interface CameraVideoCapturer extends VideoCapturer {
@Override
public void run() {
final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
Logging.d(TAG, "Camera fps: " + cameraFps +".");
Logging.d(TAG, "Camera fps: " + cameraFps + ".");
if (frameCount == 0) {
++freezePeriodCount;
if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS


@ -29,9 +29,8 @@ public class DataChannel {
public Init() {}
// Called only by native code.
private Init(
boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
String protocol, boolean negotiated, int id) {
private Init(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits, String protocol,
boolean negotiated, int id) {
this.ordered = ordered;
this.maxRetransmitTimeMs = maxRetransmitTimeMs;
this.maxRetransmits = maxRetransmits;
@ -73,7 +72,7 @@ public class DataChannel {
}
/** Keep in sync with DataChannelInterface::DataState. */
public enum State { CONNECTING, OPEN, CLOSING, CLOSED };
public enum State { CONNECTING, OPEN, CLOSING, CLOSED }
private final long nativeDataChannel;
private long nativeObserver;


@ -15,15 +15,13 @@ import android.view.Surface;
import javax.microedition.khronos.egl.EGL10;
/**
* Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
public abstract class EglBase {
// EGL wrapper for an actual EGLContext.
public static class Context {
}
public static class Context {}
// According to the documentation, EGL can be used from multiple threads at the same time if each
// thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
@ -39,6 +37,7 @@ public abstract class EglBase {
// Android-specific extension.
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
// clang-format off
public static final int[] CONFIG_PLAIN = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
@ -79,14 +78,15 @@ public abstract class EglBase {
EGL_RECORDABLE_ANDROID, 1,
EGL10.EGL_NONE
};
// clang-format on
// Create a new context with the specified config attributes, sharing data with sharedContext.
// |sharedContext| can be null.
public static EglBase create(Context sharedContext, int[] configAttributes) {
return (EglBase14.isEGL14Supported()
&& (sharedContext == null || sharedContext instanceof EglBase14.Context))
? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
: new EglBase10((EglBase10.Context) sharedContext, configAttributes);
&& (sharedContext == null || sharedContext instanceof EglBase14.Context))
? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
: new EglBase10((EglBase10.Context) sharedContext, configAttributes);
}
public static EglBase create() {


@ -159,9 +159,8 @@ public final class EglBase10 extends EglBase {
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException(
"Failed to create pixel buffer surface with size " + width + "x" + height
+ ": 0x" + Integer.toHexString(egl.eglGetError()));
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
+ height + ": 0x" + Integer.toHexString(egl.eglGetError()));
}
}
@ -235,7 +234,7 @@ public final class EglBase10 extends EglBase {
public void detachCurrent() {
synchronized (EglBase.lock) {
if (!egl.eglMakeCurrent(
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
throw new RuntimeException(
"eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
}
@ -272,8 +271,7 @@ public final class EglBase10 extends EglBase {
private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!egl.eglChooseConfig(
eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
throw new RuntimeException(
"eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
}


@ -37,8 +37,8 @@ public final class EglBase14 extends EglBase {
// EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation
// time stamp on a surface is supported from 18 so we require 18.
public static boolean isEGL14Supported() {
Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
+ ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION + ". isEGL14Supported: "
+ (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
}
@ -101,9 +101,8 @@ public final class EglBase14 extends EglBase {
int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException(
"Failed to create pixel buffer surface with size " + width + "x" + height
+ ": 0x" + Integer.toHexString(EGL14.eglGetError()));
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
+ height + ": 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
@ -178,7 +177,7 @@ public final class EglBase14 extends EglBase {
public void detachCurrent() {
synchronized (EglBase.lock) {
if (!EGL14.eglMakeCurrent(
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
throw new RuntimeException(
"eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
@ -202,7 +201,8 @@ public final class EglBase14 extends EglBase {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
// See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
// See
// https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
}
@ -228,7 +228,7 @@ public final class EglBase14 extends EglBase {
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(
eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
throw new RuntimeException(
"eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}


@ -25,6 +25,7 @@ import java.util.Map;
* manually to free the resources held by this object.
*/
public class GlRectDrawer implements RendererCommon.GlDrawer {
// clang-format off
// Simple vertex shader, used for both YUV and OES.
private static final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n"
@ -76,25 +77,24 @@ public class GlRectDrawer implements RendererCommon.GlDrawer {
+ "void main() {\n"
+ " gl_FragColor = texture2D(oes_tex, interp_tc);\n"
+ "}\n";
// clang-format on
// Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
// top-right.
private static final FloatBuffer FULL_RECTANGLE_BUF =
GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
private static final FloatBuffer FULL_RECTANGLE_BUF = GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
});
private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
});
private static class Shader {
public final GlShader glShader;


@ -25,13 +25,11 @@ public class GlShader {
}
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compileStatus = new int[] {
GLES20.GL_FALSE
};
int[] compileStatus = new int[] {GLES20.GL_FALSE};
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] != GLES20.GL_TRUE) {
Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
GLES20.glGetShaderInfoLog(shader));
Logging.e(
TAG, "Could not compile shader " + shaderType + ":" + GLES20.glGetShaderInfoLog(shader));
throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
}
GlUtil.checkNoGLES2Error("compileShader");
@ -50,13 +48,10 @@ public class GlShader {
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
int[] linkStatus = new int[] {
GLES20.GL_FALSE
};
int[] linkStatus = new int[] {GLES20.GL_FALSE};
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Logging.e(TAG, "Could not link program: " +
GLES20.glGetProgramInfoLog(program));
Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
}
// According to the documentation of glLinkProgram():


@ -52,8 +52,8 @@ public class GlTextureFrameBuffer {
GlUtil.checkNoGLES2Error("Generate framebuffer");
// Attach the texture to the framebuffer as color attachment.
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D, textureId, 0);
GLES20.glFramebufferTexture2D(
GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
// Restore normal framebuffer.


@ -42,11 +42,7 @@ public class MediaCodecVideoDecoder {
private static final long MAX_DECODE_TIME_MS = 200;
// Tracks webrtc::VideoCodecType.
public enum VideoCodecType {
VIDEO_CODEC_VP8,
VIDEO_CODEC_VP9,
VIDEO_CODEC_H264
}
public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
// Timeout for input buffer dequeue.
private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
@ -70,14 +66,13 @@ public class MediaCodecVideoDecoder {
private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc";
// List of supported HW VP8 decoders.
private static final String[] supportedVp8HwCodecPrefixes =
{"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
private static final String[] supportedVp8HwCodecPrefixes = {
"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel."};
// List of supported HW VP9 decoders.
private static final String[] supportedVp9HwCodecPrefixes =
{"OMX.qcom.", "OMX.Exynos." };
private static final String[] supportedVp9HwCodecPrefixes = {"OMX.qcom.", "OMX.Exynos."};
// List of supported HW H.264 decoders.
private static final String[] supportedH264HwCodecPrefixes =
{"OMX.qcom.", "OMX.Intel.", "OMX.Exynos." };
private static final String[] supportedH264HwCodecPrefixes = {
"OMX.qcom.", "OMX.Intel.", "OMX.Exynos."};
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
@ -87,13 +82,11 @@ public class MediaCodecVideoDecoder {
private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Allowable color formats supported by codec - in order of preference.
private static final List<Integer> supportedColorList = Arrays.asList(
CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
CodecCapabilities.COLOR_FormatYUV420Planar, CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
private int colorFormat;
private int width;
@ -108,8 +101,8 @@ public class MediaCodecVideoDecoder {
private TextureListener textureListener;
private int droppedFrames;
private Surface surface = null;
private final Queue<DecodedOutputBuffer>
dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
private final Queue<DecodedOutputBuffer> dequeuedSurfaceOutputBuffers =
new LinkedList<DecodedOutputBuffer>();
// MediaCodec error handler - invoked when critical error happens which may prevent
// further use of media codec API. Now it means that one of media codec instances
@ -142,18 +135,18 @@ public class MediaCodecVideoDecoder {
// Functions to query if HW decoding is supported.
public static boolean isVp8HwSupported() {
return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) &&
(findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE)
&& (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
}
public static boolean isVp9HwSupported() {
return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) &&
(findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE)
&& (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
}
public static boolean isH264HwSupported() {
return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) &&
(findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE)
&& (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
}
public static void printStackTrace() {
@ -175,11 +168,10 @@ public class MediaCodecVideoDecoder {
this.colorFormat = colorFormat;
}
public final String codecName; // OpenMax component name for VP8 codec.
public final int colorFormat; // Color format supported by codec.
public final int colorFormat; // Color format supported by codec.
}
private static DecoderProperties findDecoder(
String mime, String[] supportedCodecPrefixes) {
private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null; // MediaCodec.setParameters is missing.
}
@ -189,7 +181,7 @@ public class MediaCodecVideoDecoder {
try {
info = MediaCodecList.getCodecInfoAt(i);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve decoder codec info", e);
Logging.e(TAG, "Cannot retrieve decoder codec info", e);
}
if (info == null || info.isEncoder()) {
continue;
@ -202,7 +194,7 @@ public class MediaCodecVideoDecoder {
}
}
if (name == null) {
continue; // No HW support in this codec; try the next one.
continue; // No HW support in this codec; try the next one.
}
Logging.d(TAG, "Found candidate decoder " + name);
@ -223,7 +215,7 @@ public class MediaCodecVideoDecoder {
try {
capabilities = info.getCapabilitiesForType(mime);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve decoder capabilities", e);
Logging.e(TAG, "Cannot retrieve decoder capabilities", e);
continue;
}
for (int colorFormat : capabilities.colorFormats) {
@ -233,29 +225,27 @@ public class MediaCodecVideoDecoder {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
// Found supported HW decoder.
Logging.d(TAG, "Found target decoder " + name +
". Color: 0x" + Integer.toHexString(codecColorFormat));
Logging.d(TAG, "Found target decoder " + name + ". Color: 0x"
+ Integer.toHexString(codecColorFormat));
return new DecoderProperties(name, codecColorFormat);
}
}
}
}
Logging.d(TAG, "No HW decoder found for mime " + mime);
return null; // No HW decoder.
return null; // No HW decoder.
}
private void checkOnMediaCodecThread() throws IllegalStateException {
if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
throw new IllegalStateException(
"MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
" but is now called on " + Thread.currentThread());
throw new IllegalStateException("MediaCodecVideoDecoder previously operated on "
+ mediaCodecThread + " but is now called on " + Thread.currentThread());
}
}
// Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
private boolean initDecode(
VideoCodecType type, int width, int height,
SurfaceTextureHelper surfaceTextureHelper) {
VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodecThread != null) {
throw new RuntimeException("initDecode: Forgot to release()?");
}
@ -280,9 +270,8 @@ public class MediaCodecVideoDecoder {
throw new RuntimeException("Cannot find HW decoder for " + type);
}
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
". Color: 0x" + Integer.toHexString(properties.colorFormat) +
". Use Surface: " + useSurface);
Logging.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height + ". Color: 0x"
+ Integer.toHexString(properties.colorFormat) + ". Use Surface: " + useSurface);
runningInstance = this; // Decoder is now running and can be queried for stack traces.
mediaCodecThread = Thread.currentThread();
@ -317,8 +306,8 @@ public class MediaCodecVideoDecoder {
hasDecodedFirstFrame = false;
dequeuedSurfaceOutputBuffers.clear();
droppedFrames = 0;
Logging.d(TAG, "Input buffers: " + inputBuffers.length +
". Output buffers: " + outputBuffers.length);
Logging.d(TAG,
"Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
return true;
} catch (IllegalStateException e) {
Logging.e(TAG, "initDecode failed", e);
@ -406,12 +395,11 @@ public class MediaCodecVideoDecoder {
try {
inputBuffers[inputBufferIndex].position(0);
inputBuffers[inputBufferIndex].limit(size);
decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
ntpTimeStamp));
decodeStartTimeMs.add(
new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, ntpTimeStamp));
mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStamUs, 0);
return true;
}
catch (IllegalStateException e) {
} catch (IllegalStateException e) {
Logging.e(TAG, "decode failed", e);
return false;
}
@ -511,8 +499,7 @@ public class MediaCodecVideoDecoder {
public void addBufferToRender(DecodedOutputBuffer buffer) {
if (bufferToRender != null) {
Logging.e(TAG,
"Unexpected addBufferToRender() called while waiting for a texture.");
Logging.e(TAG, "Unexpected addBufferToRender() called while waiting for a texture.");
throw new IllegalStateException("Waiting for a texture.");
}
bufferToRender = buffer;
@ -530,8 +517,8 @@ public class MediaCodecVideoDecoder {
int oesTextureId, float[] transformMatrix, long timestampNs) {
synchronized (newFrameLock) {
if (renderedBuffer != null) {
Logging.e(TAG,
"Unexpected onTextureFrameAvailable() called while already holding a texture.");
Logging.e(
TAG, "Unexpected onTextureFrameAvailable() called while already holding a texture.");
throw new IllegalStateException("Already holding a texture.");
}
// |timestampNs| is always zero on some Android versions.
@ -550,7 +537,7 @@ public class MediaCodecVideoDecoder {
if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
try {
newFrameLock.wait(timeoutMs);
} catch(InterruptedException e) {
} catch (InterruptedException e) {
// Restore the interrupted status by reinterrupting the thread.
Thread.currentThread().interrupt();
}
@ -588,8 +575,8 @@ public class MediaCodecVideoDecoder {
// MediaCodec.INFO_TRY_AGAIN_LATER.
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
final int result = mediaCodec.dequeueOutputBuffer(
info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
final int result =
mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
switch (result) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = mediaCodec.getOutputBuffers();
@ -604,8 +591,8 @@ public class MediaCodecVideoDecoder {
int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
height + ". New " + new_width + "*" + new_height);
throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height
+ ". New " + new_width + "*" + new_height);
}
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
@ -635,19 +622,14 @@ public class MediaCodecVideoDecoder {
long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs;
if (decodeTimeMs > MAX_DECODE_TIME_MS) {
Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms"
+ ". Q size: " + decodeStartTimeMs.size()
+ ". Might be caused by resuming H264 decoding after a pause.");
+ ". Q size: " + decodeStartTimeMs.size()
+ ". Might be caused by resuming H264 decoding after a pause.");
decodeTimeMs = MAX_DECODE_TIME_MS;
}
return new DecodedOutputBuffer(result,
info.offset,
info.size,
TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs),
timeStamps.timeStampMs,
timeStamps.ntpTimeStampMs,
decodeTimeMs,
SystemClock.elapsedRealtime());
}
return new DecodedOutputBuffer(result, info.offset, info.size,
TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs,
timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime());
}
}
}
@ -675,8 +657,8 @@ public class MediaCodecVideoDecoder {
}
if ((dequeuedSurfaceOutputBuffers.size()
>= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
|| (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
>= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
|| (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
++droppedFrames;
// Drop the oldest frame still in dequeuedSurfaceOutputBuffers.
// The oldest frame is owned by |textureListener| and can't be dropped since
@ -686,18 +668,17 @@ public class MediaCodecVideoDecoder {
// TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
// return the one and only texture even if it does not render.
Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
+ droppedFrame.presentationTimeStampMs +
". Total number of dropped frames: " + droppedFrames);
+ droppedFrame.presentationTimeStampMs + ". Total number of dropped frames: "
+ droppedFrames);
} else {
Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() +
". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs +
". Total number of dropped frames: " + droppedFrames);
Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size()
+ ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs
+ ". Total number of dropped frames: " + droppedFrames);
}
mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
return new DecodedTextureBuffer(0, null,
droppedFrame.presentationTimeStampMs, droppedFrame.timeStampMs,
droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
return new DecodedTextureBuffer(0, null, droppedFrame.presentationTimeStampMs,
droppedFrame.timeStampMs, droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
}
return null;


@ -42,14 +42,10 @@ public class MediaCodecVideoEncoder {
private static final String TAG = "MediaCodecVideoEncoder";
// Tracks webrtc::VideoCodecType.
public enum VideoCodecType {
VIDEO_CODEC_VP8,
VIDEO_CODEC_VP9,
VIDEO_CODEC_H264
}
public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
private static final int BITRATE_ADJUSTMENT_FPS = 30;
private static final int MAXIMUM_INITIAL_FPS = 30;
private static final double BITRATE_CORRECTION_SEC = 3.0;
@ -115,56 +111,45 @@ public class MediaCodecVideoEncoder {
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties exynosVp8HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.DYNAMIC_ADJUSTMENT);
private static final MediaCodecProperties[] vp8HwList = new MediaCodecProperties[] {
qcomVp8HwProperties, exynosVp8HwProperties
};
private static final MediaCodecProperties[] vp8HwList =
new MediaCodecProperties[] {qcomVp8HwProperties, exynosVp8HwProperties};
// List of supported HW VP9 encoders.
private static final MediaCodecProperties qcomVp9HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.M, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties exynosVp9HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties[] vp9HwList = new MediaCodecProperties[] {
qcomVp9HwProperties, exynosVp9HwProperties
};
private static final MediaCodecProperties[] vp9HwList =
new MediaCodecProperties[] {qcomVp9HwProperties, exynosVp9HwProperties};
// List of supported HW H.264 encoders.
private static final MediaCodecProperties qcomH264HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties exynosH264HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT);
private static final MediaCodecProperties[] h264HwList = new MediaCodecProperties[] {
qcomH264HwProperties, exynosH264HwProperties
};
private static final MediaCodecProperties[] h264HwList =
new MediaCodecProperties[] {qcomH264HwProperties, exynosH264HwProperties};
// List of devices with poor H.264 encoder quality.
private static final String[] H264_HW_EXCEPTION_MODELS = new String[] {
// HW H.264 encoder on below devices has poor bitrate control - actual
// bitrates deviates a lot from the target value.
"SAMSUNG-SGH-I337",
"Nexus 7",
"Nexus 4"
};
// HW H.264 encoder on below devices has poor bitrate control - actual
// bitrates deviates a lot from the target value.
private static final String[] H264_HW_EXCEPTION_MODELS =
new String[] {"SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4"};
// Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
// in OMX_Video.h
private static final int VIDEO_ControlRateConstant = 2;
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
private static final int
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Allowable color formats supported by codec - in order of preference.
private static final int[] supportedColorList = {
CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
};
private static final int[] supportedSurfaceColorList = {
CodecCapabilities.COLOR_FormatSurface
};
private static final int[] supportedColorList = {CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
private static final int[] supportedSurfaceColorList = {CodecCapabilities.COLOR_FormatSurface};
private VideoCodecType type;
private int colorFormat; // Used by native code.
private int colorFormat; // Used by native code.
// Variables used for dynamic bitrate adjustment.
private BitrateAdjustmentType bitrateAdjustmentType = BitrateAdjustmentType.NO_ADJUSTMENT;
@ -209,33 +194,33 @@ public class MediaCodecVideoEncoder {
// Functions to query if HW encoding is supported.
public static boolean isVp8HwSupported() {
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
(findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedColorList) != null);
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
&& (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedColorList) != null);
}
public static boolean isVp9HwSupported() {
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
(findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
&& (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
}
public static boolean isH264HwSupported() {
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
(findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
&& (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
}
public static boolean isVp8HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
(findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedSurfaceColorList) != null);
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
&& (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedSurfaceColorList) != null);
}
public static boolean isVp9HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
(findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
&& (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
}
public static boolean isH264HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
(findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
&& (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
}
// Helper struct for findHwEncoder() below.
@ -247,7 +232,7 @@ public class MediaCodecVideoEncoder {
this.bitrateAdjustmentType = bitrateAdjustmentType;
}
public final String codecName; // OpenMax component name for HW codec.
public final int colorFormat; // Color format supported by codec.
public final int colorFormat; // Color format supported by codec.
public final BitrateAdjustmentType bitrateAdjustmentType; // Bitrate adjustment type
}
@ -273,7 +258,7 @@ public class MediaCodecVideoEncoder {
try {
info = MediaCodecList.getCodecInfoAt(i);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve encoder codec info", e);
Logging.e(TAG, "Cannot retrieve encoder codec info", e);
}
if (info == null || !info.isEncoder()) {
continue;
@ -286,7 +271,7 @@ public class MediaCodecVideoEncoder {
}
}
if (name == null) {
continue; // No HW support in this codec; try the next one.
continue; // No HW support in this codec; try the next one.
}
Logging.v(TAG, "Found candidate encoder " + name);
@ -296,14 +281,14 @@ public class MediaCodecVideoEncoder {
for (MediaCodecProperties codecProperties : supportedHwCodecProperties) {
if (name.startsWith(codecProperties.codecPrefix)) {
if (Build.VERSION.SDK_INT < codecProperties.minSdk) {
Logging.w(TAG, "Codec " + name + " is disabled due to SDK version " +
Build.VERSION.SDK_INT);
Logging.w(
TAG, "Codec " + name + " is disabled due to SDK version " + Build.VERSION.SDK_INT);
continue;
}
if (codecProperties.bitrateAdjustmentType != BitrateAdjustmentType.NO_ADJUSTMENT) {
bitrateAdjustmentType = codecProperties.bitrateAdjustmentType;
Logging.w(TAG, "Codec " + name
+ " requires bitrate adjustment: " + bitrateAdjustmentType);
Logging.w(
TAG, "Codec " + name + " requires bitrate adjustment: " + bitrateAdjustmentType);
}
supportedCodec = true;
break;
@ -318,7 +303,7 @@ public class MediaCodecVideoEncoder {
try {
capabilities = info.getCapabilitiesForType(mime);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve encoder capabilities", e);
Logging.e(TAG, "Cannot retrieve encoder capabilities", e);
continue;
}
for (int colorFormat : capabilities.colorFormats) {
@ -329,22 +314,21 @@ public class MediaCodecVideoEncoder {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
// Found supported HW encoder.
Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name
+ ". Color: 0x" + Integer.toHexString(codecColorFormat)
+ ". Bitrate adjustment: " + bitrateAdjustmentType);
Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name + ". Color: 0x"
+ Integer.toHexString(codecColorFormat) + ". Bitrate adjustment: "
+ bitrateAdjustmentType);
return new EncoderProperties(name, codecColorFormat, bitrateAdjustmentType);
}
}
}
}
return null; // No HW encoder.
return null; // No HW encoder.
}
private void checkOnMediaCodecThread() {
if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
throw new RuntimeException(
"MediaCodecVideoEncoder previously operated on " + mediaCodecThread +
" but is now called on " + Thread.currentThread());
throw new RuntimeException("MediaCodecVideoEncoder previously operated on " + mediaCodecThread
+ " but is now called on " + Thread.currentThread());
}
}
@ -373,8 +357,8 @@ public class MediaCodecVideoEncoder {
boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
EglBase14.Context sharedContext) {
final boolean useSurface = sharedContext != null;
Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height + ". @ " + kbps
+ " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
this.width = width;
this.height = height;
@ -408,12 +392,11 @@ public class MediaCodecVideoEncoder {
bitrateAdjustmentType = properties.bitrateAdjustmentType;
if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT) {
fps = BITRATE_ADJUSTMENT_FPS;
} else {
} else {
fps = Math.min(fps, MAXIMUM_INITIAL_FPS);
}
Logging.d(TAG, "Color format: " + colorFormat +
". Bitrate adjustment: " + bitrateAdjustmentType +
". Initial fps: " + fps);
Logging.d(TAG, "Color format: " + colorFormat + ". Bitrate adjustment: " + bitrateAdjustmentType
+ ". Initial fps: " + fps);
targetBitrateBps = 1000 * kbps;
targetFps = fps;
bitrateAccumulatorMax = targetBitrateBps / 8.0;
@ -436,8 +419,7 @@ public class MediaCodecVideoEncoder {
Logging.e(TAG, "Can not create media encoder");
return false;
}
mediaCodec.configure(
format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (useSurface) {
eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
@ -457,15 +439,14 @@ public class MediaCodecVideoEncoder {
return true;
}
ByteBuffer[] getInputBuffers() {
ByteBuffer[] getInputBuffers() {
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
Logging.d(TAG, "Input buffers: " + inputBuffers.length);
return inputBuffers;
}
boolean encodeBuffer(
boolean isKeyframe, int inputBuffer, int size,
long presentationTimestampUs) {
boolean isKeyframe, int inputBuffer, int size, long presentationTimestampUs) {
checkOnMediaCodecThread();
try {
if (isKeyframe) {
@ -478,11 +459,9 @@ public class MediaCodecVideoEncoder {
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
mediaCodec.setParameters(b);
}
mediaCodec.queueInputBuffer(
inputBuffer, 0, size, presentationTimestampUs, 0);
mediaCodec.queueInputBuffer(inputBuffer, 0, size, presentationTimestampUs, 0);
return true;
}
catch (IllegalStateException e) {
} catch (IllegalStateException e) {
Logging.e(TAG, "encodeBuffer failed", e);
return false;
}
@ -505,8 +484,7 @@ public class MediaCodecVideoEncoder {
drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
return true;
}
catch (RuntimeException e) {
} catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e);
return false;
}
@ -580,13 +558,13 @@ public class MediaCodecVideoEncoder {
// Adjust actual encoder bitrate based on bitrate adjustment type.
if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT && targetFps > 0) {
codecBitrateBps = BITRATE_ADJUSTMENT_FPS * targetBitrateBps / targetFps;
Logging.v(TAG, "setRates: " + kbps + " -> " + (codecBitrateBps / 1000)
+ " kbps. Fps: " + targetFps);
Logging.v(TAG,
"setRates: " + kbps + " -> " + (codecBitrateBps / 1000) + " kbps. Fps: " + targetFps);
} else if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps
+ ". ExpScale: " + bitrateAdjustmentScaleExp);
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps + ". ExpScale: "
+ bitrateAdjustmentScaleExp);
if (bitrateAdjustmentScaleExp != 0) {
codecBitrateBps = (int)(codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp));
codecBitrateBps = (int) (codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp));
}
} else {
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps);
@ -618,8 +596,7 @@ public class MediaCodecVideoEncoder {
// Helper struct for dequeueOutputBuffer() below.
static class OutputBufferInfo {
public OutputBufferInfo(
int index, ByteBuffer buffer,
boolean isKeyFrame, long presentationTimestampUs) {
int index, ByteBuffer buffer, boolean isKeyFrame, long presentationTimestampUs) {
this.index = index;
this.buffer = buffer;
this.isKeyFrame = isKeyFrame;
@ -641,11 +618,9 @@ public class MediaCodecVideoEncoder {
int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
// Check if this is config frame and save configuration data.
if (result >= 0) {
boolean isConfigFrame =
(info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
if (isConfigFrame) {
Logging.d(TAG, "Config frame generated. Offset: " + info.offset +
". Size: " + info.size);
Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
configData = ByteBuffer.allocateDirect(info.size);
outputBuffers[result].position(info.offset);
outputBuffers[result].limit(info.offset + info.size);
@ -666,27 +641,23 @@ public class MediaCodecVideoEncoder {
reportEncodedFrame(info.size);
// Check key frame flag.
boolean isKeyFrame =
(info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (isKeyFrame) {
Logging.d(TAG, "Sync frame generated");
}
if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
Logging.d(TAG, "Appending config frame of size " + configData.capacity() +
" to output buffer with offset " + info.offset + ", size " +
info.size);
Logging.d(TAG, "Appending config frame of size " + configData.capacity()
+ " to output buffer with offset " + info.offset + ", size " + info.size);
// For an H.264 key frame, append the SPS and PPS NALs at the start
ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(
configData.capacity() + info.size);
ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size);
configData.rewind();
keyFrameBuffer.put(configData);
keyFrameBuffer.put(outputBuffer);
keyFrameBuffer.position(0);
return new OutputBufferInfo(result, keyFrameBuffer,
isKeyFrame, info.presentationTimeUs);
return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs);
} else {
return new OutputBufferInfo(result, outputBuffer.slice(),
isKeyFrame, info.presentationTimeUs);
return new OutputBufferInfo(
result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
}
} else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mediaCodec.getOutputBuffers();
@ -705,7 +676,7 @@ public class MediaCodecVideoEncoder {
private double getBitrateScale(int bitrateAdjustmentScaleExp) {
return Math.pow(BITRATE_CORRECTION_MAX_SCALE,
(double)bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
(double) bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
}
private void reportEncodedFrame(int size) {
@ -727,9 +698,8 @@ public class MediaCodecVideoEncoder {
// Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
// from the target value.
if (bitrateObservationTimeMs > 1000 * BITRATE_CORRECTION_SEC) {
Logging.d(TAG, "Acc: " + (int)bitrateAccumulator
+ ". Max: " + (int)bitrateAccumulatorMax
+ ". ExpScale: " + bitrateAdjustmentScaleExp);
Logging.d(TAG, "Acc: " + (int) bitrateAccumulator + ". Max: " + (int) bitrateAccumulatorMax
+ ". ExpScale: " + bitrateAdjustmentScaleExp);
boolean bitrateAdjustmentScaleChanged = false;
if (bitrateAccumulator > bitrateAccumulatorMax) {
// Encoder generates too high bitrate - need to reduce the scale.
@ -745,8 +715,8 @@ public class MediaCodecVideoEncoder {
if (bitrateAdjustmentScaleChanged) {
bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_CORRECTION_STEPS);
bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_CORRECTION_STEPS);
Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp
+ ". Value: " + getBitrateScale(bitrateAdjustmentScaleExp));
Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp + ". Value: "
+ getBitrateScale(bitrateAdjustmentScaleExp));
setRates(targetBitrateBps / 1000, targetFps);
}
bitrateObservationTimeMs = 0;
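
The setRates() and reportEncodedFrame() hunks above make up the DYNAMIC_ADJUSTMENT path: roughly every BITRATE_CORRECTION_SEC seconds the accumulated encoder output is compared against bitrateAccumulatorMax, the integer exponent bitrateAdjustmentScaleExp is nudged up or down and clamped to +/-BITRATE_CORRECTION_STEPS, and the bitrate actually configured on MediaCodec is the target multiplied by MAX_SCALE^(exp/STEPS). A minimal standalone sketch of that scaling follows; the two constant values are assumptions picked for illustration, since their real definitions sit outside this diff.

// Sketch only. BITRATE_CORRECTION_MAX_SCALE and BITRATE_CORRECTION_STEPS are
// assumed values; the real constants are defined elsewhere in
// MediaCodecVideoEncoder.java and are not part of this change.
public class BitrateScaleSketch {
  private static final double BITRATE_CORRECTION_MAX_SCALE = 4.0; // assumed
  private static final int BITRATE_CORRECTION_STEPS = 20; // assumed

  // Mirrors getBitrateScale(): scale = MAX_SCALE ^ (exp / STEPS).
  static double getBitrateScale(int bitrateAdjustmentScaleExp) {
    return Math.pow(BITRATE_CORRECTION_MAX_SCALE,
        (double) bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
  }

  public static void main(String[] args) {
    final int targetBitrateBps = 1000000;
    // Negative exponents throttle the codec below the target, positive ones push it above.
    for (int exp : new int[] {-BITRATE_CORRECTION_STEPS, -5, 0, 5, BITRATE_CORRECTION_STEPS}) {
      int codecBitrateBps = (int) (targetBitrateBps * getBitrateScale(exp));
      System.out.println("exp=" + exp + " -> " + codecBitrateBps + " bps");
    }
  }
}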


@ -48,7 +48,7 @@ public class MediaConstraints {
if (other == null || getClass() != other.getClass()) {
return false;
}
KeyValuePair that = (KeyValuePair)other;
KeyValuePair that = (KeyValuePair) other;
return key.equals(that.key) && value.equals(that.value);
}
@ -78,7 +78,7 @@ public class MediaConstraints {
}
public String toString() {
return "mandatory: " + stringifyKeyValuePairList(mandatory) +
", optional: " + stringifyKeyValuePairList(optional);
return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
+ stringifyKeyValuePairList(optional);
}
}


@ -8,17 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ MediaSourceInterface. */
public class MediaSource {
/** Tracks MediaSourceInterface.SourceState */
public enum State {
INITIALIZING, LIVE, ENDED, MUTED
}
public enum State { INITIALIZING, LIVE, ENDED, MUTED }
final long nativeSource; // Package-protected for PeerConnectionFactory.
final long nativeSource; // Package-protected for PeerConnectionFactory.
public MediaSource(long nativeSource) {
this.nativeSource = nativeSource;


@ -89,21 +89,16 @@ public class MediaStream {
}
public String toString() {
return "[" + label() + ":A=" + audioTracks.size() +
":V=" + videoTracks.size() + "]";
return "[" + label() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]";
}
private static native boolean nativeAddAudioTrack(
long nativeStream, long nativeAudioTrack);
private static native boolean nativeAddAudioTrack(long nativeStream, long nativeAudioTrack);
private static native boolean nativeAddVideoTrack(
long nativeStream, long nativeVideoTrack);
private static native boolean nativeAddVideoTrack(long nativeStream, long nativeVideoTrack);
private static native boolean nativeRemoveAudioTrack(
long nativeStream, long nativeAudioTrack);
private static native boolean nativeRemoveAudioTrack(long nativeStream, long nativeAudioTrack);
private static native boolean nativeRemoveVideoTrack(
long nativeStream, long nativeVideoTrack);
private static native boolean nativeRemoveVideoTrack(long nativeStream, long nativeVideoTrack);
private static native String nativeLabel(long nativeStream);


@ -51,8 +51,7 @@ public class MediaStreamTrack {
private static native boolean nativeEnabled(long nativeTrack);
private static native boolean nativeSetEnabled(
long nativeTrack, boolean enabled);
private static native boolean nativeSetEnabled(long nativeTrack, boolean enabled);
private static native State nativeState(long nativeTrack);


@ -36,7 +36,7 @@ public class Metrics {
System.loadLibrary("jingle_peerconnection_so");
}
public final Map<String, HistogramInfo> map =
new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
/**
* Class holding histogram information.
@ -46,7 +46,7 @@ public class Metrics {
public final int max;
public final int bucketCount;
public final Map<Integer, Integer> samples =
new HashMap<Integer, Integer>(); // <value, # of events>
new HashMap<Integer, Integer>(); // <value, # of events>
public HistogramInfo(int min, int max, int bucketCount) {
this.min = min;


@ -140,25 +140,23 @@ public class NetworkMonitor {
return;
}
if (autoDetector == null) {
autoDetector = new NetworkMonitorAutoDetect(
new NetworkMonitorAutoDetect.Observer() {
autoDetector = new NetworkMonitorAutoDetect(new NetworkMonitorAutoDetect.Observer() {
@Override
public void onConnectionTypeChanged(ConnectionType newConnectionType) {
updateCurrentConnectionType(newConnectionType);
}
@Override
public void onConnectionTypeChanged(ConnectionType newConnectionType) {
updateCurrentConnectionType(newConnectionType);
}
@Override
public void onNetworkConnect(NetworkInformation networkInfo) {
notifyObserversOfNetworkConnect(networkInfo);
}
@Override
public void onNetworkConnect(NetworkInformation networkInfo) {
notifyObserversOfNetworkConnect(networkInfo);
}
@Override
public void onNetworkDisconnect(long networkHandle) {
notifyObserversOfNetworkDisconnect(networkHandle);
}
},
applicationContext);
@Override
public void onNetworkDisconnect(long networkHandle) {
notifyObserversOfNetworkDisconnect(networkHandle);
}
}, applicationContext);
final NetworkMonitorAutoDetect.NetworkState networkState =
autoDetector.getCurrentNetworkState();
updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
@ -241,8 +239,8 @@ public class NetworkMonitor {
private native void nativeNotifyConnectionTypeChanged(long nativePtr);
private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
private native void nativeNotifyOfNetworkDisconnect(long nativePtr, long networkHandle);
private native void nativeNotifyOfActiveNetworkList(long nativePtr,
NetworkInformation[] networkInfos);
private native void nativeNotifyOfActiveNetworkList(
long nativePtr, NetworkInformation[] networkInfos);
// For testing only.
static void resetInstanceForTests(Context context) {


@ -58,19 +58,19 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
public static class IPAddress {
public final byte[] address;
public IPAddress (byte[] address) {
public IPAddress(byte[] address) {
this.address = address;
}
}
/** Java version of NetworkMonitor.NetworkInformation */
public static class NetworkInformation{
public static class NetworkInformation {
public final String name;
public final ConnectionType type;
public final long handle;
public final IPAddress[] ipAddresses;
public NetworkInformation(String name, ConnectionType type, long handle,
IPAddress[] addresses) {
public NetworkInformation(
String name, ConnectionType type, long handle, IPAddress[] addresses) {
this.name = name;
this.type = type;
this.handle = handle;
@ -112,7 +112,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/
@SuppressLint("NewApi")
private class SimpleNetworkCallback extends NetworkCallback {
@Override
public void onAvailable(Network network) {
Logging.d(TAG, "Network becomes available: " + network.toString());
@ -120,8 +119,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
@Override
public void onCapabilitiesChanged(
Network network, NetworkCapabilities networkCapabilities) {
public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) {
// A capabilities change may indicate the ConnectionType has changed,
// so forward the new NetworkInformation along to the observer.
Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
@ -140,8 +138,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
public void onLosing(Network network, int maxMsToLive) {
// Tells that the network is about to be lost in maxMsToLive milliseconds.
// We may use this signal later.
Logging.d(TAG,
"Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
Logging.d(
TAG, "Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
}
@Override
@ -303,15 +301,13 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
if (connectionType == ConnectionType.CONNECTION_UNKNOWN
|| connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
+ " because it has type " + networkState.getNetworkType()
+ " and subtype " + networkState.getNetworkSubType());
+ " because it has type " + networkState.getNetworkType() + " and subtype "
+ networkState.getNetworkSubType());
}
NetworkInformation networkInformation = new NetworkInformation(
linkProperties.getInterfaceName(),
connectionType,
networkToNetId(network),
getIPAddresses(linkProperties));
NetworkInformation networkInformation =
new NetworkInformation(linkProperties.getInterfaceName(), connectionType,
networkToNetId(network), getIPAddresses(linkProperties));
return networkInformation;
}
@ -324,8 +320,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
if (connectivityManager == null) {
return false;
}
final NetworkCapabilities capabilities =
connectivityManager.getNetworkCapabilities(network);
final NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network);
return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
}
@ -369,7 +364,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
}
/** Queries the WifiManager for SSID of the current Wifi connection. */
static class WifiManagerDelegate {
private final Context context;
@ -384,8 +378,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
String getWifiSSID() {
final Intent intent = context.registerReceiver(null,
new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
final Intent intent = context.registerReceiver(
null, new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
if (intent != null) {
final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
if (wifiInfo != null) {
@ -397,7 +391,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
return "";
}
}
static final long INVALID_NET_ID = -1;
@ -507,7 +500,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
* Registers a BroadcastReceiver in the given context.
*/
private void registerReceiver() {
if (isRegistered) return;
if (isRegistered)
return;
isRegistered = true;
context.registerReceiver(this, intentFilter);
@ -517,7 +511,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
* Unregisters the BroadcastReceiver in the given context.
*/
private void unregisterReceiver() {
if (!isRegistered) return;
if (!isRegistered)
return;
isRegistered = false;
context.unregisterReceiver(this);
@ -581,7 +576,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
private String getWifiSSID(NetworkState networkState) {
if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI) return "";
if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI)
return "";
return wifiManagerDelegate.getWifiSSID();
}
@ -597,7 +593,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
private void connectionTypeChanged(NetworkState networkState) {
ConnectionType newConnectionType = getConnectionType(networkState);
String newWifiSSID = getWifiSSID(networkState);
if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return;
if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID))
return;
connectionType = newConnectionType;
wifiSSID = newWifiSSID;


@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.Collections;
@ -27,19 +26,28 @@ public class PeerConnection {
}
/** Tracks PeerConnectionInterface::IceGatheringState */
public enum IceGatheringState { NEW, GATHERING, COMPLETE };
public enum IceGatheringState { NEW, GATHERING, COMPLETE }
/** Tracks PeerConnectionInterface::IceConnectionState */
public enum IceConnectionState {
NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED
};
NEW,
CHECKING,
CONNECTED,
COMPLETED,
FAILED,
DISCONNECTED,
CLOSED
}
/** Tracks PeerConnectionInterface::SignalingState */
public enum SignalingState {
STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER,
HAVE_REMOTE_PRANSWER, CLOSED
};
STABLE,
HAVE_LOCAL_OFFER,
HAVE_LOCAL_PRANSWER,
HAVE_REMOTE_OFFER,
HAVE_REMOTE_PRANSWER,
CLOSED
}
/** Java version of PeerConnectionObserver. */
public static interface Observer {
@ -97,39 +105,25 @@ public class PeerConnection {
}
/** Java version of PeerConnectionInterface.IceTransportsType */
public enum IceTransportsType {
NONE, RELAY, NOHOST, ALL
};
public enum IceTransportsType { NONE, RELAY, NOHOST, ALL }
/** Java version of PeerConnectionInterface.BundlePolicy */
public enum BundlePolicy {
BALANCED, MAXBUNDLE, MAXCOMPAT
};
public enum BundlePolicy { BALANCED, MAXBUNDLE, MAXCOMPAT }
/** Java version of PeerConnectionInterface.RtcpMuxPolicy */
public enum RtcpMuxPolicy {
NEGOTIATE, REQUIRE
};
public enum RtcpMuxPolicy { NEGOTIATE, REQUIRE }
/** Java version of PeerConnectionInterface.TcpCandidatePolicy */
public enum TcpCandidatePolicy {
ENABLED, DISABLED
};
public enum TcpCandidatePolicy { ENABLED, DISABLED }
/** Java version of PeerConnectionInterface.CandidateNetworkPolicy */
public enum CandidateNetworkPolicy {
ALL, LOW_COST
};
public enum CandidateNetworkPolicy { ALL, LOW_COST }
/** Java version of rtc::KeyType */
public enum KeyType {
RSA, ECDSA
}
public enum KeyType { RSA, ECDSA }
/** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
public enum ContinualGatheringPolicy {
GATHER_ONCE, GATHER_CONTINUALLY
}
public enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY }
/** Java version of PeerConnectionInterface.RTCConfiguration */
public static class RTCConfiguration {
@ -187,26 +181,20 @@ public class PeerConnection {
public native SessionDescription getRemoteDescription();
public native DataChannel createDataChannel(
String label, DataChannel.Init init);
public native DataChannel createDataChannel(String label, DataChannel.Init init);
public native void createOffer(
SdpObserver observer, MediaConstraints constraints);
public native void createOffer(SdpObserver observer, MediaConstraints constraints);
public native void createAnswer(
SdpObserver observer, MediaConstraints constraints);
public native void createAnswer(SdpObserver observer, MediaConstraints constraints);
public native void setLocalDescription(
SdpObserver observer, SessionDescription sdp);
public native void setLocalDescription(SdpObserver observer, SessionDescription sdp);
public native void setRemoteDescription(
SdpObserver observer, SessionDescription sdp);
public native void setRemoteDescription(SdpObserver observer, SessionDescription sdp);
public native boolean setConfiguration(RTCConfiguration config);
public boolean addIceCandidate(IceCandidate candidate) {
return nativeAddIceCandidate(
candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
return nativeAddIceCandidate(candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
}
public boolean removeIceCandidates(final IceCandidate[] candidates) {
@ -314,8 +302,7 @@ public class PeerConnection {
private native void nativeRemoveLocalStream(long nativeStream);
private native boolean nativeGetStats(
StatsObserver observer, long nativeTrack);
private native boolean nativeGetStats(StatsObserver observer, long nativeTrack);
private native RtpSender nativeCreateSender(String kind, String stream_id);
@ -323,9 +310,7 @@ public class PeerConnection {
private native List<RtpReceiver> nativeGetReceivers();
private native boolean nativeStartRtcEventLog(
int file_descriptor, int max_size_bytes);
private native boolean nativeStartRtcEventLog(int file_descriptor, int max_size_bytes);
private native void nativeStopRtcEventLog();
}
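
addIceCandidate() above simply forwards the three fields of an IceCandidate (sdpMid, sdpMLineIndex, sdp) to the native layer. A hedged usage sketch for the receiving side of a signaling channel; the IceCandidate(String, int, String) constructor is assumed here, as it is not part of this diff.

// Sketch only: hand a remote candidate from the app's signaling channel to an
// existing PeerConnection. Assumes an IceCandidate(String, int, String)
// constructor matching the fields read by addIceCandidate() above.
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.PeerConnection;

class RemoteCandidateSink {
  private static final String TAG = "RemoteCandidateSink";

  void onRemoteCandidate(PeerConnection pc, String sdpMid, int sdpMLineIndex, String sdp) {
    IceCandidate candidate = new IceCandidate(sdpMid, sdpMLineIndex, sdp);
    if (!pc.addIceCandidate(candidate)) {
      Logging.e(TAG, "addIceCandidate failed for mid=" + sdpMid);
    }
  }
}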


@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.List;
@ -52,9 +51,8 @@ public class PeerConnectionFactory {
// |renderEGLContext| can be provided to support HW video decoding to
// texture and will be used to create a shared EGL context on video
// decoding thread.
public static native boolean initializeAndroidGlobals(
Object context, boolean initializeAudio, boolean initializeVideo,
boolean videoHwAcceleration);
public static native boolean initializeAndroidGlobals(Object context, boolean initializeAudio,
boolean initializeVideo, boolean videoHwAcceleration);
// Field trial initialization. Must be called before PeerConnectionFactory
// is created.
@ -81,51 +79,44 @@ public class PeerConnectionFactory {
}
}
public PeerConnection createPeerConnection(
PeerConnection.RTCConfiguration rtcConfig,
MediaConstraints constraints,
PeerConnection.Observer observer) {
public PeerConnection createPeerConnection(PeerConnection.RTCConfiguration rtcConfig,
MediaConstraints constraints, PeerConnection.Observer observer) {
long nativeObserver = nativeCreateObserver(observer);
if (nativeObserver == 0) {
return null;
}
long nativePeerConnection = nativeCreatePeerConnection(
nativeFactory, rtcConfig, constraints, nativeObserver);
long nativePeerConnection =
nativeCreatePeerConnection(nativeFactory, rtcConfig, constraints, nativeObserver);
if (nativePeerConnection == 0) {
return null;
}
return new PeerConnection(nativePeerConnection, nativeObserver);
}
public PeerConnection createPeerConnection(
List<PeerConnection.IceServer> iceServers,
MediaConstraints constraints,
PeerConnection.Observer observer) {
PeerConnection.RTCConfiguration rtcConfig =
new PeerConnection.RTCConfiguration(iceServers);
public PeerConnection createPeerConnection(List<PeerConnection.IceServer> iceServers,
MediaConstraints constraints, PeerConnection.Observer observer) {
PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
return createPeerConnection(rtcConfig, constraints, observer);
}
public MediaStream createLocalMediaStream(String label) {
return new MediaStream(
nativeCreateLocalMediaStream(nativeFactory, label));
return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
}
public VideoSource createVideoSource(VideoCapturer capturer) {
final EglBase.Context eglContext =
localEglbase == null ? null : localEglbase.getEglBaseContext();
long nativeAndroidVideoTrackSource = nativeCreateVideoSource(
nativeFactory, eglContext, capturer.isScreencast());
VideoCapturer.CapturerObserver capturerObserver
= new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
nativeInitializeVideoCapturer(nativeFactory, capturer, nativeAndroidVideoTrackSource,
capturerObserver);
long nativeAndroidVideoTrackSource =
nativeCreateVideoSource(nativeFactory, eglContext, capturer.isScreencast());
VideoCapturer.CapturerObserver capturerObserver =
new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
nativeInitializeVideoCapturer(
nativeFactory, capturer, nativeAndroidVideoTrackSource, capturerObserver);
return new VideoSource(nativeAndroidVideoTrackSource);
}
public VideoTrack createVideoTrack(String id, VideoSource source) {
return new VideoTrack(nativeCreateVideoTrack(
nativeFactory, id, source.nativeSource));
return new VideoTrack(nativeCreateVideoTrack(nativeFactory, id, source.nativeSource));
}
public AudioSource createAudioSource(MediaConstraints constraints) {
@ -133,8 +124,7 @@ public class PeerConnectionFactory {
}
public AudioTrack createAudioTrack(String id, AudioSource source) {
return new AudioTrack(nativeCreateAudioTrack(
nativeFactory, id, source.nativeSource));
return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.nativeSource));
}
// Starts recording an AEC dump. Ownership of the file is transferred to the
@ -161,8 +151,8 @@ public class PeerConnectionFactory {
* renderer.
* @param remoteEglContext Must be the same as used by any remote video renderer.
*/
public void setVideoHwAccelerationOptions(EglBase.Context localEglContext,
EglBase.Context remoteEglContext) {
public void setVideoHwAccelerationOptions(
EglBase.Context localEglContext, EglBase.Context remoteEglContext) {
if (localEglbase != null) {
Logging.w(TAG, "Egl context already set.");
localEglbase.release();
@ -173,8 +163,8 @@ public class PeerConnectionFactory {
}
localEglbase = EglBase.create(localEglContext);
remoteEglbase = EglBase.create(remoteEglContext);
nativeSetVideoHwAccelerationOptions(nativeFactory, localEglbase.getEglBaseContext(),
remoteEglbase.getEglBaseContext());
nativeSetVideoHwAccelerationOptions(
nativeFactory, localEglbase.getEglBaseContext(), remoteEglbase.getEglBaseContext());
}
public void dispose() {
@ -227,22 +217,19 @@ public class PeerConnectionFactory {
private static native long nativeCreatePeerConnectionFactory(Options options);
private static native long nativeCreateObserver(
PeerConnection.Observer observer);
private static native long nativeCreateObserver(PeerConnection.Observer observer);
private static native long nativeCreatePeerConnection(
long nativeFactory, PeerConnection.RTCConfiguration rtcConfig,
MediaConstraints constraints, long nativeObserver);
private static native long nativeCreatePeerConnection(long nativeFactory,
PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver);
private static native long nativeCreateLocalMediaStream(
long nativeFactory, String label);
private static native long nativeCreateLocalMediaStream(long nativeFactory, String label);
private static native long nativeCreateVideoSource(
long nativeFactory, EglBase.Context eglContext, boolean is_screencast);
private static native void nativeInitializeVideoCapturer(
long native_factory, VideoCapturer j_video_capturer, long native_source,
VideoCapturer.CapturerObserver j_frame_observer);
private static native void nativeInitializeVideoCapturer(long native_factory,
VideoCapturer j_video_capturer, long native_source,
VideoCapturer.CapturerObserver j_frame_observer);
private static native long nativeCreateVideoTrack(
long nativeFactory, String id, long nativeVideoSource);
@ -258,8 +245,7 @@ public class PeerConnectionFactory {
private static native void nativeStopAecDump(long nativeFactory);
@Deprecated
public native void nativeSetOptions(long nativeFactory, Options options);
@Deprecated public native void nativeSetOptions(long nativeFactory, Options options);
private static native void nativeSetVideoHwAccelerationOptions(
long nativeFactory, Object localEGLContext, Object remoteEGLContext);
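
createVideoSource()/createVideoTrack() above show the capture-to-track wiring: the factory creates the native AndroidVideoTrackSource, hooks the VideoCapturer to it through a CapturerObserver, and then wraps the source in a VideoTrack. A small usage sketch built only on the factory methods visible in this hunk; the already-constructed factory and capturer, the track id strings and the no-argument MediaConstraints constructor are assumptions.

// Sketch only: local audio/video track setup using the factory methods shown
// above. The PeerConnectionFactory and VideoCapturer instances, the track id
// strings and the no-argument MediaConstraints constructor are assumptions.
import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.MediaConstraints;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;

class LocalMediaSetup {
  VideoTrack createLocalVideoTrack(PeerConnectionFactory factory, VideoCapturer capturer) {
    VideoSource videoSource = factory.createVideoSource(capturer);
    return factory.createVideoTrack("ARDAMSv0", videoSource); // arbitrary id
  }

  AudioTrack createLocalAudioTrack(PeerConnectionFactory factory) {
    AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
    return factory.createAudioTrack("ARDAMSa0", audioSource); // arbitrary id
  }
}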


@ -42,8 +42,8 @@ public class RendererCommon {
*/
void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
int viewportY, int viewportWidth, int viewportHeight);
void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
@ -116,6 +116,7 @@ public class RendererCommon {
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
// This limits excessive cropping when adjusting display size.
private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
// clang-format off
public static final float[] identityMatrix() {
return new float[] {
1, 0, 0, 0,
@ -140,6 +141,7 @@ public class RendererCommon {
0, 0, 1, 0,
1, 0, 0, 1};
}
// clang-format on
/**
* Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
@ -189,8 +191,8 @@ public class RendererCommon {
/**
* Calculate display size based on scaling type, video aspect ratio, and maximum display size.
*/
public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
int maxDisplayWidth, int maxDisplayHeight) {
public static Point getDisplaySize(
ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
maxDisplayWidth, maxDisplayHeight);
}
@ -230,17 +232,17 @@ public class RendererCommon {
* Calculate display size based on minimum fraction of the video that must remain visible,
* video aspect ratio, and maximum display size.
*/
private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
int maxDisplayWidth, int maxDisplayHeight) {
private static Point getDisplaySize(
float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
// If there is no constraint on the amount of cropping, fill the allowed display area.
if (minVisibleFraction == 0 || videoAspectRatio == 0) {
return new Point(maxDisplayWidth, maxDisplayHeight);
}
// Each dimension is constrained on max display size and how much we are allowed to crop.
final int width = Math.min(maxDisplayWidth,
Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
final int height = Math.min(maxDisplayHeight,
Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
final int width = Math.min(
maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
final int height = Math.min(
maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
return new Point(width, height);
}
}
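
The two getDisplaySize() overloads above reduce to one formula: each dimension is capped by the maximum display size and by how much cropping the visible fraction allows (width <= maxHeight / minVisibleFraction * aspect, height <= maxWidth / minVisibleFraction / aspect). A standalone sketch with a worked example follows; a fraction of 1.0 stands for no cropping at all, and 0.5625 is the BALANCED_VISIBLE_FRACTION constant from this file.

// Standalone re-statement of the private getDisplaySize() formula above, with
// plain ints instead of android.graphics.Point so it runs anywhere.
class DisplaySizeSketch {
  // Returns {width, height}.
  static int[] displaySize(
      float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
    if (minVisibleFraction == 0 || videoAspectRatio == 0) {
      return new int[] {maxDisplayWidth, maxDisplayHeight};
    }
    final int width = Math.min(
        maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
    final int height = Math.min(
        maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
    return new int[] {width, height};
  }

  public static void main(String[] args) {
    // A 4:3 video shown without cropping in a 1280x720 area comes out as 960x720.
    int[] size = displaySize(1.0f, 4f / 3f, 1280, 720);
    System.out.println(size[0] + "x" + size[1]);
  }
}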


@ -48,8 +48,8 @@ public class RtpReceiver {
// Will be released in dispose().
private static native long nativeGetTrack(long nativeRtpReceiver);
private static native boolean nativeSetParameters(long nativeRtpReceiver,
RtpParameters parameters);
private static native boolean nativeSetParameters(
long nativeRtpReceiver, RtpParameters parameters);
private static native RtpParameters nativeGetParameters(long nativeRtpReceiver);


@ -30,9 +30,8 @@ public class RtpSender {
// not appropriate when the track is owned by, for example, another RtpSender
// or a MediaStream.
public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
if (!nativeSetTrack(nativeRtpSender,
(track == null) ? 0 : track.nativeTrack)) {
return false;
if (!nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.nativeTrack)) {
return false;
}
if (cachedTrack != null && ownsTrack) {
cachedTrack.dispose();
@ -65,20 +64,17 @@ public class RtpSender {
free(nativeRtpSender);
}
private static native boolean nativeSetTrack(long nativeRtpSender,
long nativeTrack);
private static native boolean nativeSetTrack(long nativeRtpSender, long nativeTrack);
// This should increment the reference count of the track.
// Will be released in dispose() or setTrack().
private static native long nativeGetTrack(long nativeRtpSender);
private static native boolean nativeSetParameters(long nativeRtpSender,
RtpParameters parameters);
private static native boolean nativeSetParameters(long nativeRtpSender, RtpParameters parameters);
private static native RtpParameters nativeGetParameters(long nativeRtpSender);
private static native String nativeId(long nativeRtpSender);
private static native void free(long nativeRtpSender);
}
;
};


@ -35,11 +35,10 @@ import java.util.List;
* frames. At any time, at most one frame is being processed.
*/
@TargetApi(21)
public class ScreenCapturerAndroid implements
VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
private static final int DISPLAY_FLAGS = DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
| DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
public class ScreenCapturerAndroid
implements VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
private static final int DISPLAY_FLAGS =
DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
// DPI for VirtualDisplay, does not seem to matter for us.
private static final int VIRTUAL_DISPLAY_DPI = 400;
@ -65,8 +64,7 @@ public class ScreenCapturerAndroid implements
* @param mediaProjectionCallback MediaProjection callback to implement application specific
* logic in events such as when the user revokes a previously granted capture permission.
**/
public ScreenCapturerAndroid(
Intent mediaProjectionPermissionResultData,
public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData,
MediaProjection.Callback mediaProjectionCallback) {
this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
this.mediaProjectionCallback = mediaProjectionCallback;
@ -79,10 +77,8 @@ public class ScreenCapturerAndroid implements
}
@Override
public synchronized void initialize(
final SurfaceTextureHelper surfaceTextureHelper,
final Context applicationContext,
final VideoCapturer.CapturerObserver capturerObserver) {
public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
final Context applicationContext, final VideoCapturer.CapturerObserver capturerObserver) {
checkNotDisposed();
if (capturerObserver == null) {
@ -95,13 +91,13 @@ public class ScreenCapturerAndroid implements
}
this.surfaceTextureHelper = surfaceTextureHelper;
mediaProjectionManager = (MediaProjectionManager)
applicationContext.getSystemService(Context.MEDIA_PROJECTION_SERVICE);
mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
Context.MEDIA_PROJECTION_SERVICE);
}
@Override
public synchronized void startCapture(final int width, final int height,
final int ignoredFramerate) {
public synchronized void startCapture(
final int width, final int height, final int ignoredFramerate) {
checkNotDisposed();
this.width = width;
@ -143,7 +139,6 @@ public class ScreenCapturerAndroid implements
});
}
@Override
public synchronized void dispose() {
isDisposed = true;
@ -184,9 +179,8 @@ public class ScreenCapturerAndroid implements
private void createVirtualDisplay() {
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
virtualDisplay = mediaProjection.createVirtualDisplay(
"WebRTC_ScreenCapture", width, height, VIRTUAL_DISPLAY_DPI,
DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
null /* callback */, null /* callback handler */);
}
@ -194,8 +188,8 @@ public class ScreenCapturerAndroid implements
@Override
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
numCapturedFrames++;
capturerObserver.onTextureFrameCaptured(width, height, oesTextureId, transformMatrix,
0 /* rotation */, timestampNs);
capturerObserver.onTextureFrameCaptured(
width, height, oesTextureId, transformMatrix, 0 /* rotation */, timestampNs);
}
@Override
@ -207,4 +201,3 @@ public class ScreenCapturerAndroid implements
return numCapturedFrames;
}
}
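
The constructor above takes the Intent that the system screen-capture prompt returns plus a MediaProjection.Callback for revocation, and initialize() then fetches the MediaProjectionManager itself. A hedged sketch of the activity-side hand-off; the request code, class names and callback body are illustrative only.

// Sketch only: obtaining the permission Intent that ScreenCapturerAndroid's
// constructor expects. Request code and class name are arbitrary.
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import org.webrtc.ScreenCapturerAndroid;
import org.webrtc.VideoCapturer;

public class ScreenCaptureGlue {
  static final int CAPTURE_PERMISSION_REQUEST_CODE = 1; // arbitrary

  void requestCapturePermission(Activity activity) {
    MediaProjectionManager manager = (MediaProjectionManager)
        activity.getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    activity.startActivityForResult(
        manager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
  }

  // Call from Activity.onActivityResult() with the data Intent once permission is granted.
  VideoCapturer createScreenCapturer(Intent permissionResultData) {
    return new ScreenCapturerAndroid(permissionResultData, new MediaProjection.Callback() {
      @Override
      public void onStop() {
        // The user revoked the capture permission; stop capture and release resources here.
      }
    });
  }
}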


@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
@ -19,7 +18,9 @@ package org.webrtc;
public class SessionDescription {
/** Java-land enum version of SessionDescriptionInterface's type() string. */
public static enum Type {
OFFER, PRANSWER, ANSWER;
OFFER,
PRANSWER,
ANSWER;
public String canonicalForm() {
return name().toLowerCase();


@ -12,7 +12,6 @@ package org.webrtc;
/** Java version of webrtc::StatsReport. */
public class StatsReport {
/** Java version of webrtc::StatsReport::Value. */
public static class Value {
public final String name;
@ -45,8 +44,13 @@ public class StatsReport {
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("id: ").append(id).append(", type: ").append(type)
.append(", timestamp: ").append(timestamp).append(", values: ");
builder.append("id: ")
.append(id)
.append(", type: ")
.append(type)
.append(", timestamp: ")
.append(timestamp)
.append(", values: ");
for (int i = 0; i < values.length; ++i) {
builder.append(values[i].toString()).append(", ");
}


@ -83,69 +83,69 @@ class SurfaceTextureHelper {
// Vertex coordinates in Normalized Device Coordinates, i.e.
// (-1, -1) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer DEVICE_RECTANGLE =
GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer TEXTURE_RECTANGLE =
GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
});
private static final FloatBuffer TEXTURE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
});
// clang-format off
private static final String VERTEX_SHADER =
"varying vec2 interp_tc;\n"
+ "attribute vec4 in_pos;\n"
+ "attribute vec4 in_tc;\n"
+ "\n"
+ "uniform mat4 texMatrix;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = in_pos;\n"
+ " interp_tc = (texMatrix * in_tc).xy;\n"
+ "}\n";
"varying vec2 interp_tc;\n"
+ "attribute vec4 in_pos;\n"
+ "attribute vec4 in_tc;\n"
+ "\n"
+ "uniform mat4 texMatrix;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = in_pos;\n"
+ " interp_tc = (texMatrix * in_tc).xy;\n"
+ "}\n";
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform samplerExternalOES oesTex;\n"
// Difference in texture coordinate corresponding to one
// sub-pixel in the x direction.
+ "uniform vec2 xUnit;\n"
// Color conversion coefficients, including constant term
+ "uniform vec4 coeffs;\n"
+ "\n"
+ "void main() {\n"
// Since the alpha read from the texture is always 1, this could
// be written as a mat4 x vec4 multiply. However, that seems to
// give a worse framerate, possibly because the additional
// multiplies by 1.0 consume resources. TODO(nisse): Could also
// try to do it as a vec3 x mat3x4, followed by an add in of a
// constant vector.
+ " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+ " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+ " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+ " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+ "}\n";
"#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform samplerExternalOES oesTex;\n"
// Difference in texture coordinate corresponding to one
// sub-pixel in the x direction.
+ "uniform vec2 xUnit;\n"
// Color conversion coefficients, including constant term
+ "uniform vec4 coeffs;\n"
+ "\n"
+ "void main() {\n"
// Since the alpha read from the texture is always 1, this could
// be written as a mat4 x vec4 multiply. However, that seems to
// give a worse framerate, possibly because the additional
// multiplies by 1.0 consume resources. TODO(nisse): Could also
// try to do it as a vec3 x mat3x4, followed by an add in of a
// constant vector.
+ " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+ " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+ " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+ " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+ "}\n";
// clang-format on
private int texMatrixLoc;
private int xUnitLoc;
private int coeffsLoc;;
private int coeffsLoc;
YuvConverter (EglBase.Context sharedContext) {
YuvConverter(EglBase.Context sharedContext) {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
@ -165,11 +165,10 @@ class SurfaceTextureHelper {
eglBase.detachCurrent();
}
synchronized void convert(ByteBuffer buf,
int width, int height, int stride, int textureId, float [] transformMatrix) {
synchronized void convert(
ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) {
if (released) {
throw new IllegalStateException(
"YuvConverter.convert called on released object");
throw new IllegalStateException("YuvConverter.convert called on released object");
}
// We draw into a buffer laid out like
@ -202,17 +201,15 @@ class SurfaceTextureHelper {
// has to be a multiple of 8 pixels.
if (stride % 8 != 0) {
throw new IllegalArgumentException(
"Invalid stride, must be a multiple of 8");
throw new IllegalArgumentException("Invalid stride, must be a multiple of 8");
}
if (stride < width){
throw new IllegalArgumentException(
"Invalid stride, must >= width");
if (stride < width) {
throw new IllegalArgumentException("Invalid stride, must >= width");
}
int y_width = (width+3) / 4;
int uv_width = (width+7) / 8;
int uv_height = (height+1)/2;
int y_width = (width + 3) / 4;
int uv_width = (width + 7) / 8;
int uv_height = (height + 1) / 2;
int total_height = height + uv_height;
int size = stride * total_height;
@ -222,18 +219,16 @@ class SurfaceTextureHelper {
// Produce a frame buffer starting at top-left corner, not
// bottom-left.
transformMatrix =
RendererCommon.multiplyMatrices(transformMatrix,
RendererCommon.verticalFlipMatrix());
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix());
// Create a new pbuffer surface with the correct size if needed.
if (eglBase.hasSurface()) {
if (eglBase.surfaceWidth() != stride/4 ||
eglBase.surfaceHeight() != total_height){
if (eglBase.surfaceWidth() != stride / 4 || eglBase.surfaceHeight() != total_height) {
eglBase.releaseSurface();
eglBase.createPbufferSurface(stride/4, total_height);
eglBase.createPbufferSurface(stride / 4, total_height);
}
} else {
eglBase.createPbufferSurface(stride/4, total_height);
eglBase.createPbufferSurface(stride / 4, total_height);
}
eglBase.makeCurrent();
@ -245,9 +240,7 @@ class SurfaceTextureHelper {
// Draw Y
GLES20.glViewport(0, 0, y_width, height);
// Matrix * (1;0;0;0) / width. Note that opengl uses column major order.
GLES20.glUniform2f(xUnitLoc,
transformMatrix[0] / width,
transformMatrix[1] / width);
GLES20.glUniform2f(xUnitLoc, transformMatrix[0] / width, transformMatrix[1] / width);
// Y'UV444 to RGB888, see
// https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
// We use the ITU-R coefficients for U and V */
@ -257,19 +250,18 @@ class SurfaceTextureHelper {
// Draw U
GLES20.glViewport(0, height, uv_width, uv_height);
// Matrix * (1;0;0;0) / (width / 2). Note that opengl uses column major order.
GLES20.glUniform2f(xUnitLoc,
2.0f * transformMatrix[0] / width,
2.0f * transformMatrix[1] / width);
GLES20.glUniform2f(
xUnitLoc, 2.0f * transformMatrix[0] / width, 2.0f * transformMatrix[1] / width);
GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Draw V
GLES20.glViewport(stride/8, height, uv_width, uv_height);
GLES20.glViewport(stride / 8, height, uv_width, uv_height);
GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
GLES20.GL_UNSIGNED_BYTE, buf);
GLES20.glReadPixels(
0, 0, stride / 4, total_height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
GlUtil.checkNoGLES2Error("YuvConverter.convert");
@ -351,7 +343,7 @@ class SurfaceTextureHelper {
if (yuvConverter != null)
return yuvConverter;
synchronized(this) {
synchronized (this) {
if (yuvConverter == null)
yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
return yuvConverter;
@ -409,7 +401,8 @@ class SurfaceTextureHelper {
*/
public void returnTextureFrame() {
handler.post(new Runnable() {
@Override public void run() {
@Override
public void run() {
isTextureInUse = false;
if (isQuitting) {
release();
@ -442,8 +435,8 @@ class SurfaceTextureHelper {
});
}
public void textureToYUV(ByteBuffer buf,
int width, int height, int stride, int textureId, float [] transformMatrix) {
public void textureToYUV(
ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) {
if (textureId != oesTextureId)
throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
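
The fragment shader in this hunk packs four neighbouring samples into one RGBA output pixel, computing every channel as coeffs.a + dot(coeffs.rgb, rgb); convert() then selects the Y, U or V coefficient vector per draw call. A CPU-side sketch of that per-pixel arithmetic follows; the U and V rows come from the glUniform4f calls visible above, while the Y row (0.299, 0.587, 0.114, 0) is the standard BT.601 luma and is assumed, since its glUniform4f call falls outside this hunk.

// Sketch only. The U/V coefficient vectors are taken from convert() above; the
// Y row is an assumption (standard BT.601 luma).
class YuvCoeffsSketch {
  // r, g, b in [0, 1]; returns {y, u, v} in [0, 1], with U and V centered at 0.5.
  static float[] rgbToYuv(float r, float g, float b) {
    float y = 0.000f + 0.299f * r + 0.587f * g + 0.114f * b;  // assumed Y row
    float u = 0.500f - 0.169f * r - 0.331f * g + 0.499f * b;  // U draw call above
    float v = 0.500f + 0.499f * r - 0.418f * g - 0.0813f * b; // V draw call above
    return new float[] {y, u, v};
  }

  public static void main(String[] args) {
    // Pure white comes out as Y close to 1.0 and U, V close to 0.5 (no chroma).
    float[] yuv = rgbToYuv(1f, 1f, 1f);
    System.out.println(yuv[0] + " " + yuv[1] + " " + yuv[2]);
  }
}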


@ -33,8 +33,8 @@ import javax.microedition.khronos.egl.EGLContext;
* Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
* Interaction with the layout framework in onMeasure and onSizeChanged.
*/
public class SurfaceViewRenderer extends SurfaceView
implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
public class SurfaceViewRenderer
extends SurfaceView implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
private static final String TAG = "SurfaceViewRenderer";
// Dedicated render thread.
@ -103,13 +103,15 @@ public class SurfaceViewRenderer extends SurfaceView
// Runnable for posting frames to render thread.
private final Runnable renderFrameRunnable = new Runnable() {
@Override public void run() {
@Override
public void run() {
renderFrameOnRenderThread();
}
};
// Runnable for clearing Surface to black.
private final Runnable makeBlackRunnable = new Runnable() {
@Override public void run() {
@Override
public void run() {
makeBlack();
}
};
@ -134,8 +136,7 @@ public class SurfaceViewRenderer extends SurfaceView
* Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
* reinitialize the renderer after a previous init()/release() cycle.
*/
public void init(
EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
}
@ -145,9 +146,9 @@ public class SurfaceViewRenderer extends SurfaceView
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
* init()/release() cycle.
*/
public void init(
final EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
final int[] configAttributes, RendererCommon.GlDrawer drawer) {
public void init(final EglBase.Context sharedContext,
RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
RendererCommon.GlDrawer drawer) {
synchronized (handlerLock) {
if (renderThreadHandler != null) {
throw new IllegalStateException(getResourceName() + "Already initialized");
@ -210,7 +211,8 @@ public class SurfaceViewRenderer extends SurfaceView
// when the EGL context is lost. It might be dangerous to delete them manually in
// Activity.onDestroy().
renderThreadHandler.postAtFrontOfQueue(new Runnable() {
@Override public void run() {
@Override
public void run() {
drawer.release();
drawer = null;
if (yuvTextures != null) {
@ -289,8 +291,7 @@ public class SurfaceViewRenderer extends SurfaceView
}
synchronized (handlerLock) {
if (renderThreadHandler == null) {
Logging.d(TAG, getResourceName()
+ "Dropping frame - Not initialized or already released.");
Logging.d(TAG, getResourceName() + "Dropping frame - Not initialized or already released.");
VideoRenderer.renderFrameDone(frame);
return;
}
@ -335,8 +336,8 @@ public class SurfaceViewRenderer extends SurfaceView
return;
}
desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
isNewSize = (desiredLayoutSize.x != getMeasuredWidth()
|| desiredLayoutSize.y != getMeasuredHeight());
isNewSize =
(desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight());
setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
}
if (isNewSize) {
@ -498,17 +499,17 @@ public class SurfaceViewRenderer extends SurfaceView
// Make sure YUV textures are allocated.
if (yuvTextures == null) {
yuvTextures = new int[3];
for (int i = 0; i < 3; i++) {
for (int i = 0; i < 3; i++) {
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
}
yuvUploader.uploadYuvData(
yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
0, 0, surfaceSize.x, surfaceSize.y);
drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
surfaceSize.x, surfaceSize.y);
} else {
drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
0, 0, surfaceSize.x, surfaceSize.y);
drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
surfaceSize.x, surfaceSize.y);
}
eglBase.swapBuffers();
@ -547,8 +548,8 @@ public class SurfaceViewRenderer extends SurfaceView
synchronized (layoutLock) {
if (frameWidth != frame.width || frameHeight != frame.height
|| frameRotation != frame.rotationDegree) {
Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
+ frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to " + frame.width
+ "x" + frame.height + " with rotation " + frame.rotationDegree);
if (rendererEvents != null) {
rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
}
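The rendererEvents callback invoked above is supplied at init() time. A minimal listener is sketched below for context; the onFirstFrameRendered() method is assumed to be part of RendererCommon.RendererEvents alongside the onFrameResolutionChanged() call visible in this hunk.

import org.webrtc.Logging;
import org.webrtc.RendererCommon;

final class LoggingRendererEvents implements RendererCommon.RendererEvents {
  private static final String TAG = "LoggingRendererEvents";

  @Override
  public void onFirstFrameRendered() {
    Logging.d(TAG, "First frame rendered.");
  }

  @Override
  public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) {
    Logging.d(TAG, "Resolution changed to " + videoWidth + "x" + videoHeight + " with rotation "
        + rotation);
  }
}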
@ -556,7 +557,8 @@ public class SurfaceViewRenderer extends SurfaceView
frameHeight = frame.height;
frameRotation = frame.rotationDegree;
post(new Runnable() {
@Override public void run() {
@Override
public void run() {
requestLayout();
}
});
@ -566,14 +568,14 @@ public class SurfaceViewRenderer extends SurfaceView
private void logStatistics() {
synchronized (statisticsLock) {
Logging.d(TAG, getResourceName() + "Frames received: "
+ framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
Logging.d(TAG, getResourceName() + "Frames received: " + framesReceived + ". Dropped: "
+ framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
" ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6)
+ " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, getResourceName() + "Average render time: "
+ (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
+ (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
}
}
}

View File

@ -24,14 +24,13 @@ public interface VideoCapturer {
void onCapturerStopped();
// Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
long timeStamp);
void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp);
// Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
// owned by VideoCapturer.
void onTextureFrameCaptured(
int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
long timestamp);
void onTextureFrameCaptured(int width, int height, int oesTextureId, float[] transformMatrix,
int rotation, long timestamp);
}
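For context, a bare-bones CapturerObserver that only counts delivered frames might look like the sketch below. This is an illustration, not part of this change, and it assumes the interface declares exactly the callbacks visible in this file plus onCapturerStarted(boolean), which is inferred from the native wrapper further down.

import org.webrtc.VideoCapturer;

final class FrameCountingObserver implements VideoCapturer.CapturerObserver {
  private int frameCount;

  @Override
  public void onCapturerStarted(boolean success) {}

  @Override
  public void onCapturerStopped() {}

  @Override
  public void onByteBufferFrameCaptured(
      byte[] data, int width, int height, int rotation, long timeStamp) {
    ++frameCount;
  }

  @Override
  public void onTextureFrameCaptured(int width, int height, int oesTextureId,
      float[] transformMatrix, int rotation, long timestamp) {
    ++frameCount;
  }

  public int frameCount() {
    return frameCount;
  }
}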
// An implementation of CapturerObserver that forwards all calls from
@ -55,25 +54,23 @@ public interface VideoCapturer {
}
@Override
public void onByteBufferFrameCaptured(byte[] data, int width, int height,
int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured(nativeSource, data, data.length, width, height, rotation,
timeStamp);
public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured(
nativeSource, data, data.length, width, height, rotation, timeStamp);
}
@Override
public void onTextureFrameCaptured(
int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
long timestamp) {
nativeOnTextureFrameCaptured(nativeSource, width, height, oesTextureId, transformMatrix,
rotation, timestamp);
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp) {
nativeOnTextureFrameCaptured(
nativeSource, width, height, oesTextureId, transformMatrix, rotation, timestamp);
}
private native void nativeCapturerStarted(long nativeSource,
boolean success);
private native void nativeCapturerStarted(long nativeSource, boolean success);
private native void nativeCapturerStopped(long nativeSource);
private native void nativeOnByteBufferFrameCaptured(long nativeSource,
byte[] data, int length, int width, int height, int rotation, long timeStamp);
private native void nativeOnByteBufferFrameCaptured(long nativeSource, byte[] data, int length,
int width, int height, int rotation, long timeStamp);
private native void nativeOnTextureFrameCaptured(long nativeSource, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
}

View File

@ -40,14 +40,13 @@ import java.util.concurrent.atomic.AtomicBoolean;
// the camera has been stopped.
// TODO(magjed): This class name is now confusing - rename to Camera1VideoCapturer.
@SuppressWarnings("deprecation")
public class VideoCapturerAndroid implements
CameraVideoCapturer,
android.hardware.Camera.PreviewCallback,
SurfaceTextureHelper.OnTextureFrameAvailableListener {
public class VideoCapturerAndroid
implements CameraVideoCapturer, android.hardware.Camera.PreviewCallback,
SurfaceTextureHelper.OnTextureFrameAvailableListener {
private static final String TAG = "VideoCapturerAndroid";
private static final int CAMERA_STOP_TIMEOUT_MS = 7000;
private android.hardware.Camera camera; // Only non-null while capturing.
private android.hardware.Camera camera; // Only non-null while capturing.
private final AtomicBoolean isCameraRunning = new AtomicBoolean();
// Use maybePostOnCameraThread() instead of posting directly to the handler - this way all
// callbacks with a specified token can be removed at once.
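The token pattern referred to in the comment above is plain Android Handler behavior; a self-contained sketch (not WebRTC code) is shown below.

import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;

final class TokenPostingExample {
  static void demo() {
    final HandlerThread cameraThread = new HandlerThread("CameraThreadExample");
    cameraThread.start();
    final Handler handler = new Handler(cameraThread.getLooper());
    final Object token = new Object();
    // Post work tagged with |token|, mirroring maybePostDelayedOnCameraThread() below.
    handler.postAtTime(new Runnable() {
      @Override
      public void run() {
        // Camera work would run here.
      }
    }, token, SystemClock.uptimeMillis() + 100 /* delayMs */);
    // All pending callbacks posted with |token| can be removed in a single call.
    handler.removeCallbacksAndMessages(token);
    cameraThread.quit();
  }
}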
@ -83,30 +82,29 @@ public class VideoCapturerAndroid implements
// Camera error callback.
private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
new android.hardware.Camera.ErrorCallback() {
@Override
public void onError(int error, android.hardware.Camera camera) {
String errorMessage;
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
errorMessage = "Camera server died!";
} else {
errorMessage = "Camera error: " + error;
}
Logging.e(TAG, errorMessage);
if (eventsHandler != null) {
eventsHandler.onCameraError(errorMessage);
}
}
};
@Override
public void onError(int error, android.hardware.Camera camera) {
String errorMessage;
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
errorMessage = "Camera server died!";
} else {
errorMessage = "Camera error: " + error;
}
Logging.e(TAG, errorMessage);
if (eventsHandler != null) {
eventsHandler.onCameraError(errorMessage);
}
}
};
public static VideoCapturerAndroid create(String name,
CameraEventsHandler eventsHandler) {
public static VideoCapturerAndroid create(String name, CameraEventsHandler eventsHandler) {
return VideoCapturerAndroid.create(name, eventsHandler, false /* captureToTexture */);
}
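For illustration only: application code typically obtains a device name from a camera enumerator and passes it to create(). The name below is a placeholder, and failure handling is assumed from the try/catch in the deprecated factory that follows (its catch body is not shown in this hunk).

import org.webrtc.CameraVideoCapturer;
import org.webrtc.VideoCapturerAndroid;

final class CapturerCreationExample {
  static VideoCapturerAndroid createCapturer() {
    // Placeholder device name; in practice it comes from a camera enumerator
    // (e.g. Camera1Enumerator).
    final String deviceName = "Camera 0, Facing front, Orientation 270";
    final CameraVideoCapturer.CameraEventsHandler eventsHandler = null;  // Optional.
    // May return null if the camera cannot be opened.
    return VideoCapturerAndroid.create(deviceName, eventsHandler);
  }
}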
// Use ctor directly instead.
@Deprecated
public static VideoCapturerAndroid create(String name,
CameraEventsHandler eventsHandler, boolean captureToTexture) {
public static VideoCapturerAndroid create(
String name, CameraEventsHandler eventsHandler, boolean captureToTexture) {
try {
return new VideoCapturerAndroid(name, eventsHandler, captureToTexture);
} catch (RuntimeException e) {
@ -176,7 +174,8 @@ public class VideoCapturerAndroid implements
@Override
public void changeCaptureFormat(final int width, final int height, final int framerate) {
maybePostOnCameraThread(new Runnable() {
@Override public void run() {
@Override
public void run() {
startPreviewOnCameraThread(width, height, framerate);
}
});
@ -195,8 +194,8 @@ public class VideoCapturerAndroid implements
return isCapturingToTexture;
}
public VideoCapturerAndroid(String cameraName, CameraEventsHandler eventsHandler,
boolean captureToTexture) {
public VideoCapturerAndroid(
String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
if (android.hardware.Camera.getNumberOfCameras() == 0) {
throw new RuntimeException("No cameras available");
}
@ -225,7 +224,7 @@ public class VideoCapturerAndroid implements
private boolean maybePostDelayedOnCameraThread(int delayMs, Runnable runnable) {
return cameraThreadHandler != null && isCameraRunning.get()
&& cameraThreadHandler.postAtTime(
runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
}
@Override
@ -332,8 +331,8 @@ public class VideoCapturerAndroid implements
camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
Logging.d(TAG, "Camera orientation: " + info.orientation +
" .Device orientation: " + getDeviceOrientation());
Logging.d(TAG, "Camera orientation: " + info.orientation + " .Device orientation: "
+ getDeviceOrientation());
camera.setErrorCallback(cameraErrorCallback);
startPreviewOnCameraThread(width, height, framerate);
frameObserver.onCapturerStarted(true);
@ -343,7 +342,7 @@ public class VideoCapturerAndroid implements
// Start camera observer.
cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
} catch (IOException|RuntimeException e) {
} catch (IOException | RuntimeException e) {
Logging.e(TAG, "startCapture failed", e);
// Make sure the camera is released.
stopCaptureOnCameraThread(true /* stopHandler */);
@ -351,7 +350,7 @@ public class VideoCapturerAndroid implements
if (eventsHandler != null) {
eventsHandler.onCameraError("Camera can not be started.");
}
}
}
}
// (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
@ -392,8 +391,7 @@ public class VideoCapturerAndroid implements
}
// Update camera parameters.
Logging.d(TAG, "isVideoStabilizationSupported: " +
parameters.isVideoStabilizationSupported());
Logging.d(TAG, "isVideoStabilizationSupported: " + parameters.isVideoStabilizationSupported());
if (parameters.isVideoStabilizationSupported()) {
parameters.setVideoStabilization(true);
}
@ -453,7 +451,8 @@ public class VideoCapturerAndroid implements
Logging.d(TAG, "stopCapture");
final CountDownLatch barrier = new CountDownLatch(1);
final boolean didPost = maybePostOnCameraThread(new Runnable() {
@Override public void run() {
@Override
public void run() {
stopCaptureOnCameraThread(true /* stopHandler */);
barrier.countDown();
}
@ -535,9 +534,8 @@ public class VideoCapturerAndroid implements
private int getDeviceOrientation() {
int orientation = 0;
WindowManager wm = (WindowManager) applicationContext.getSystemService(
Context.WINDOW_SERVICE);
switch(wm.getDefaultDisplay().getRotation()) {
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;
@ -579,8 +577,7 @@ public class VideoCapturerAndroid implements
throw new RuntimeException("Unexpected camera in callback!");
}
final long captureTimeNs =
TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
if (eventsHandler != null && !firstFrameReported) {
eventsHandler.onFirstFrameAvailable();
@ -588,14 +585,13 @@ public class VideoCapturerAndroid implements
}
cameraStatistics.addFrame();
frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
getFrameOrientation(), captureTimeNs);
frameObserver.onByteBufferFrameCaptured(
data, captureFormat.width, captureFormat.height, getFrameOrientation(), captureTimeNs);
camera.addCallbackBuffer(data);
}
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread();
if (!isCameraRunning.get()) {
Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");

View File

@ -61,11 +61,13 @@ public class VideoRenderer {
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
// bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
// matrix.
// clang-format off
samplingMatrix = new float[] {
1, 0, 0, 0,
0, -1, 0, 0,
0, 0, 1, 0,
0, 1, 0, 1};
// clang-format on
}
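As a concrete check of the comment above: the matrix is laid out column-major (the OpenGL convention), so multiplying it with a texture coordinate (u, v, 0, 1) gives (u, 1 - v, 0, 1), i.e. only the vertical axis is flipped. A standalone sketch, not part of this change, using android.opengl.Matrix:

import android.opengl.Matrix;

final class SamplingMatrixCheck {
  static void demo() {
    // Same column-major matrix as in the constructor above.
    final float[] samplingMatrix = {
        1, 0, 0, 0,
        0, -1, 0, 0,
        0, 0, 1, 0,
        0, 1, 0, 1};
    final float[] texCoord = {0.25f, 0.75f, 0f, 1f};  // (u, v, 0, 1).
    final float[] result = new float[4];
    Matrix.multiplyMV(result, 0, samplingMatrix, 0, texCoord, 0);
    // result is {0.25f, 0.25f, 0f, 1f}: u is unchanged, v is mapped to 1 - v.
  }
}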
/**
@ -97,14 +99,13 @@ public class VideoRenderer {
@Override
public String toString() {
return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
":" + yuvStrides[2];
return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] + ":" + yuvStrides[2];
}
}
// Helper native function to do a video frame plane copying.
public static native void nativeCopyPlane(ByteBuffer src, int width,
int height, int srcStride, ByteBuffer dst, int dstStride);
public static native void nativeCopyPlane(
ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);
/** The real meat of VideoSinkInterface. */
public static interface Callbacks {
@ -115,17 +116,17 @@ public class VideoRenderer {
public void renderFrame(I420Frame frame);
}
/**
* This must be called after every renderFrame() to release the frame.
*/
public static void renderFrameDone(I420Frame frame) {
frame.yuvPlanes = null;
frame.textureId = 0;
if (frame.nativeFramePointer != 0) {
releaseNativeFrame(frame.nativeFramePointer);
frame.nativeFramePointer = 0;
}
}
/**
* This must be called after every renderFrame() to release the frame.
*/
public static void renderFrameDone(I420Frame frame) {
frame.yuvPlanes = null;
frame.textureId = 0;
if (frame.nativeFramePointer != 0) {
releaseNativeFrame(frame.nativeFramePointer);
frame.nativeFramePointer = 0;
}
}
long nativeVideoRenderer;
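A minimal Callbacks implementation that honors the renderFrameDone() contract documented above; the actual rendering work is elided in this sketch.

import org.webrtc.VideoRenderer;

final class ReleasingCallbacks implements VideoRenderer.Callbacks {
  @Override
  public void renderFrame(VideoRenderer.I420Frame frame) {
    try {
      // Inspect or render |frame| here.
    } finally {
      // Every delivered frame must be returned, even when it is dropped.
      VideoRenderer.renderFrameDone(frame);
    }
  }
}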

View File

@ -78,7 +78,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// TODO(magjed): Delete GL resources in release(). Must be synchronized with draw(). We are
// currently leaking resources to avoid a rare crash in release() where the EGLContext has
// become invalid beforehand.
private int[] yuvTextures = { 0, 0, 0 };
private int[] yuvTextures = {0, 0, 0};
private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
private final RendererCommon.GlDrawer drawer;
// Resources for making a deep copy of incoming OES texture frame.
@ -90,7 +90,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private I420Frame pendingFrame;
private final Object pendingFrameLock = new Object();
// Type of video frame used for recent frame rendering.
private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE }
private RendererType rendererType;
private RendererCommon.ScalingType scalingType;
private boolean mirror;
@ -136,9 +137,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// it rendered up right.
private int rotationDegree;
private YuvImageRenderer(
GLSurfaceView surface, int id,
int x, int y, int width, int height,
private YuvImageRenderer(GLSurfaceView surface, int id, int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
this.surface = surface;
@ -167,11 +166,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
private void createTextures() {
Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
Thread.currentThread().getId());
Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:"
+ Thread.currentThread().getId());
// Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
for (int i = 0; i < 3; i++) {
for (int i = 0; i < 3; i++) {
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
// Generate texture and framebuffer for offscreen texture copy.
@ -179,30 +178,29 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
private void updateLayoutMatrix() {
synchronized(updateLayoutLock) {
synchronized (updateLayoutLock) {
if (!updateLayoutProperties) {
return;
}
// Initialize to maximum allowed area. Round to integer coordinates inwards the layout
// bounding box (ceil left/top and floor right/bottom) to not break constraints.
displayLayout.set(
(screenWidth * layoutInPercentage.left + 99) / 100,
displayLayout.set((screenWidth * layoutInPercentage.left + 99) / 100,
(screenHeight * layoutInPercentage.top + 99) / 100,
(screenWidth * layoutInPercentage.right) / 100,
(screenHeight * layoutInPercentage.bottom) / 100);
Logging.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
+ displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
+ " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
Logging.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
+ displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
+ " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
final float videoAspectRatio = (rotationDegree % 180 == 0)
? (float) videoWidth / videoHeight
: (float) videoHeight / videoWidth;
// Adjust display size based on |scalingType|.
final Point displaySize = RendererCommon.getDisplaySize(scalingType,
videoAspectRatio, displayLayout.width(), displayLayout.height());
final Point displaySize = RendererCommon.getDisplaySize(
scalingType, videoAspectRatio, displayLayout.width(), displayLayout.height());
displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
(displayLayout.height() - displaySize.y) / 2);
Logging.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
+ displayLayout.height());
(displayLayout.height() - displaySize.y) / 2);
Logging.d(TAG,
" Adjusted display size: " + displayLayout.width() + " x " + displayLayout.height());
layoutMatrix = RendererCommon.getLayoutMatrix(
mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
updateLayoutProperties = false;
@ -242,14 +240,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
GlUtil.checkNoGLES2Error("glBindFramebuffer");
// Copy the OES texture content. This will also normalize the sampling matrix.
drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
textureCopy.getWidth(), textureCopy.getHeight(),
0, 0, textureCopy.getWidth(), textureCopy.getHeight());
rotatedSamplingMatrix = RendererCommon.identityMatrix();
drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix, textureCopy.getWidth(),
textureCopy.getHeight(), 0, 0, textureCopy.getWidth(), textureCopy.getHeight());
rotatedSamplingMatrix = RendererCommon.identityMatrix();
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glFinish();
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glFinish();
}
copyTimeNs += (System.nanoTime() - now);
VideoRenderer.renderFrameDone(pendingFrame);
@ -263,8 +260,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// OpenGL defaults to lower left origin - flip viewport position vertically.
final int viewportY = screenHeight - displayLayout.bottom;
if (rendererType == RendererType.RENDERER_YUV) {
drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight,
displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight, displayLayout.left,
viewportY, displayLayout.width(), displayLayout.height());
} else {
drawer.drawRgb(textureCopy.getTextureId(), texMatrix, videoWidth, videoHeight,
displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
@ -281,25 +278,23 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private void logStatistics() {
long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
". Frames received: " + framesReceived +
". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
Logging.d(TAG, "ID: " + id + ". Type: " + rendererType + ". Frames received: "
+ framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
" ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, "Draw time: " +
(int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
(int) (copyTimeNs / (1000 * framesReceived)) + " us");
Logging.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) + " ms. FPS: "
+ framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, "Draw time: " + (int) (drawTimeNs / (1000 * framesRendered))
+ " us. Copy time: " + (int) (copyTimeNs / (1000 * framesReceived)) + " us");
}
}
public void setScreenSize(final int screenWidth, final int screenHeight) {
synchronized(updateLayoutLock) {
synchronized (updateLayoutLock) {
if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) {
return;
}
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " +
screenWidth + " x " + screenHeight);
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " + screenWidth + " x "
+ screenHeight);
this.screenWidth = screenWidth;
this.screenHeight = screenHeight;
updateLayoutProperties = true;
@ -310,14 +305,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
RendererCommon.ScalingType scalingType, boolean mirror) {
final Rect layoutInPercentage =
new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
synchronized(updateLayoutLock) {
synchronized (updateLayoutLock) {
if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
&& mirror == this.mirror) {
return;
}
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
") " + width + " x " + height + ". Scaling: " + scalingType +
". Mirror: " + mirror);
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y + ") "
+ width + " x " + height + ". Scaling: " + scalingType + ". Mirror: " + mirror);
this.layoutInPercentage.set(layoutInPercentage);
this.scalingType = scalingType;
this.mirror = mirror;
@ -331,14 +325,14 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
return;
}
if (rendererEvents != null) {
Logging.d(TAG, "ID: " + id +
". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight);
Logging.d(TAG, "ID: " + id + ". Reporting frame resolution changed to " + videoWidth + " x "
+ videoHeight);
rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
}
synchronized (updateLayoutLock) {
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
videoWidth + " x " + videoHeight + " rotation " + rotation);
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " + videoWidth + " x "
+ videoHeight + " rotation " + rotation);
this.videoWidth = videoWidth;
this.videoHeight = videoHeight;
@ -366,11 +360,10 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
synchronized (pendingFrameLock) {
// Check input frame parameters.
if (frame.yuvFrame) {
if (frame.yuvStrides[0] < frame.width ||
frame.yuvStrides[1] < frame.width / 2 ||
frame.yuvStrides[2] < frame.width / 2) {
Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
if (frame.yuvStrides[0] < frame.width || frame.yuvStrides[1] < frame.width / 2
|| frame.yuvStrides[2] < frame.width / 2) {
Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " + frame.yuvStrides[1]
+ ", " + frame.yuvStrides[2]);
VideoRenderer.renderFrameDone(frame);
return;
}
@ -394,8 +387,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
/** Passes GLSurfaceView to video renderer. */
public static synchronized void setView(GLSurfaceView surface,
Runnable eglContextReadyCallback) {
public static synchronized void setView(GLSurfaceView surface, Runnable eglContextReadyCallback) {
Logging.d(TAG, "VideoRendererGui.setView");
instance = new VideoRendererGui(surface);
eglContextReady = eglContextReadyCallback;
@ -407,7 +399,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
/** Releases GLSurfaceView video renderer. */
public static synchronized void dispose() {
if (instance == null){
if (instance == null) {
return;
}
Logging.d(TAG, "VideoRendererGui.dispose");
@ -431,14 +423,12 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
*/
public static VideoRenderer createGui(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
YuvImageRenderer javaGuiRenderer = create(
x, y, width, height, scalingType, mirror);
YuvImageRenderer javaGuiRenderer = create(x, y, width, height, scalingType, mirror);
return new VideoRenderer(javaGuiRenderer);
}
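For context, typical wiring of VideoRendererGui from application code is sketched below. The GLSurfaceView instance and the scaling choice are placeholders, and createGui() declares a checked Exception, so the call is wrapped in try/catch.

import android.opengl.GLSurfaceView;
import org.webrtc.RendererCommon;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoRendererGui;

final class RendererGuiSetupExample {
  static VideoRenderer setUp(GLSurfaceView glSurfaceView) {
    VideoRendererGui.setView(glSurfaceView, new Runnable() {
      @Override
      public void run() {
        // Called once the EGL context is ready; a good time to create video sources.
      }
    });
    try {
      // Fullscreen renderer: position and size are given in percent of the surface.
      return VideoRendererGui.createGui(
          0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FIT, false /* mirror */);
    } catch (Exception e) {
      return null;  // createGui() declares a checked Exception.
    }
  }
}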
public static VideoRenderer.Callbacks createGuiRenderer(
int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
int x, int y, int width, int height, RendererCommon.ScalingType scalingType, boolean mirror) {
return create(x, y, width, height, scalingType, mirror);
}
@ -447,8 +437,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
* resolution (width, height). All parameters are in percentage of
* screen resolution.
*/
public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
public static synchronized YuvImageRenderer create(
int x, int y, int width, int height, RendererCommon.ScalingType scalingType, boolean mirror) {
return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
}
@ -460,19 +450,16 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
// Check display region parameters.
if (x < 0 || x > 100 || y < 0 || y > 100 ||
width < 0 || width > 100 || height < 0 || height > 100 ||
x + width > 100 || y + height > 100) {
if (x < 0 || x > 100 || y < 0 || y > 100 || width < 0 || width > 100 || height < 0
|| height > 100 || x + width > 100 || y + height > 100) {
throw new RuntimeException("Incorrect window parameters.");
}
if (instance == null) {
throw new RuntimeException(
"Attempt to create yuv renderer before setting GLSurfaceView");
throw new RuntimeException("Attempt to create yuv renderer before setting GLSurfaceView");
}
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
instance.surface, instance.yuvImageRenderers.size(),
x, y, width, height, scalingType, mirror, drawer);
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(instance.surface,
instance.yuvImageRenderers.size(), x, y, width, height, scalingType, mirror, drawer);
synchronized (instance.yuvImageRenderers) {
if (instance.onSurfaceCreatedCalled) {
// onSurfaceCreated has already been called for VideoRendererGui -
@ -483,8 +470,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
@Override
public void run() {
yuvImageRenderer.createTextures();
yuvImageRenderer.setScreenSize(
instance.screenWidth, instance.screenHeight);
yuvImageRenderer.setScreenSize(instance.screenWidth, instance.screenHeight);
countDownLatch.countDown();
}
});
@ -501,13 +487,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
return yuvImageRenderer;
}
public static synchronized void update(
VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
public static synchronized void update(VideoRenderer.Callbacks renderer, int x, int y, int width,
int height, RendererCommon.ScalingType scalingType, boolean mirror) {
Logging.d(TAG, "VideoRendererGui.update");
if (instance == null) {
throw new RuntimeException(
"Attempt to update yuv renderer before setting GLSurfaceView");
throw new RuntimeException("Attempt to update yuv renderer before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@ -522,8 +506,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
Logging.d(TAG, "VideoRendererGui.setRendererEvents");
if (instance == null) {
throw new RuntimeException(
"Attempt to set renderer events before setting GLSurfaceView");
throw new RuntimeException("Attempt to set renderer events before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@ -537,8 +520,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public static synchronized void remove(VideoRenderer.Callbacks renderer) {
Logging.d(TAG, "VideoRendererGui.remove");
if (instance == null) {
throw new RuntimeException(
"Attempt to remove renderer before setting GLSurfaceView");
throw new RuntimeException("Attempt to remove renderer before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
final int index = instance.yuvImageRenderers.indexOf(renderer);
@ -553,8 +535,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public static synchronized void reset(VideoRenderer.Callbacks renderer) {
Logging.d(TAG, "VideoRendererGui.reset");
if (instance == null) {
throw new RuntimeException(
"Attempt to reset renderer before setting GLSurfaceView");
throw new RuntimeException("Attempt to reset renderer before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@ -621,8 +602,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
width + " x " + height + " ");
Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " + width + " x " + height + " ");
screenWidth = width;
screenHeight = height;
synchronized (yuvImageRenderers) {
@ -645,5 +625,4 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
}
}
}

View File

@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**

View File

@ -14,8 +14,7 @@ import java.util.LinkedList;
/** Java version of VideoTrackInterface. */
public class VideoTrack extends MediaStreamTrack {
private final LinkedList<VideoRenderer> renderers =
new LinkedList<VideoRenderer>();
private final LinkedList<VideoRenderer> renderers = new LinkedList<VideoRenderer>();
public VideoTrack(long nativeTrack) {
super(nativeTrack);
@ -43,9 +42,7 @@ public class VideoTrack extends MediaStreamTrack {
private static native void free(long nativeTrack);
private static native void nativeAddRenderer(
long nativeTrack, long nativeRenderer);
private static native void nativeAddRenderer(long nativeTrack, long nativeRenderer);
private static native void nativeRemoveRenderer(
long nativeTrack, long nativeRenderer);
private static native void nativeRemoveRenderer(long nativeTrack, long nativeRenderer);
}
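To tie the pieces together, a hedged sketch of attaching a renderer to a track: the public addRenderer()/removeRenderer() wrappers are assumed to exist around the native methods shown above, and the VideoRendererGui callbacks come from the createGuiRenderer() factory in the previous file.

import org.webrtc.RendererCommon;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoRendererGui;
import org.webrtc.VideoTrack;

final class TrackRenderingExample {
  static VideoRenderer attach(VideoTrack remoteVideoTrack) {
    // Callbacks created by VideoRendererGui; position and size are in percent of the surface.
    final VideoRenderer.Callbacks callbacks = VideoRendererGui.createGuiRenderer(
        0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FIT, false /* mirror */);
    final VideoRenderer renderer = new VideoRenderer(callbacks);
    remoteVideoTrack.addRenderer(renderer);  // Assumed public wrapper over nativeAddRenderer().
    return renderer;
  }
}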