Format all Java in WebRTC.
BUG=webrtc:6419
TBR=henrika@webrtc.org

Review-Url: https://codereview.webrtc.org/2377003002
Cr-Commit-Position: refs/heads/master@{#14432}
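The reformatting in the diff below follows a consistent pattern across the camera, EGL, and MediaCodec classes: wrapped argument lists are packed up to the column limit, boolean operators lead their continuation lines, a space separates keywords like switch from the opening parenthesis, and redundant semicolons after enum bodies are dropped. A minimal, self-contained Java sketch of those conventions (the class and members below are illustrative only and are not part of this change):

// Illustrative only; mirrors the formatting conventions applied in this change.
public final class FormattingExample {
  // Trailing semicolon after the enum body is removed.
  private static enum SessionState { RUNNING, STOPPED }

  static boolean isSupported(boolean disabledByFieldTrial, Object decoder) {
    // Boolean operators start the continuation line instead of ending the previous one.
    return !disabledByFieldTrial
        && (decoder != null);
  }

  static int rotationToDegrees(int rotation) {
    // A space separates "switch" from the opening parenthesis.
    switch (rotation) {
      case 1:
        return 90;
      default:
        return 0;
    }
  }
}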
@@ -31,24 +31,20 @@ public class CameraEnumerationTest {
@Test
public void testGetClosestSupportedFramerateRange() {
assertEquals(new FramerateRange(10000, 30000),
getClosestSupportedFramerateRange(
Arrays.asList(new FramerateRange(10000, 30000),
getClosestSupportedFramerateRange(Arrays.asList(new FramerateRange(10000, 30000),
new FramerateRange(30000, 30000)),
30 /* requestedFps */));

assertEquals(new FramerateRange(10000, 20000),
getClosestSupportedFramerateRange(
Arrays.asList(new FramerateRange(0, 30000),
new FramerateRange(10000, 20000),
new FramerateRange(14000, 16000),
new FramerateRange(15000, 15000)),
Arrays.asList(new FramerateRange(0, 30000), new FramerateRange(10000, 20000),
new FramerateRange(14000, 16000), new FramerateRange(15000, 15000)),
15 /* requestedFps */));

assertEquals(new FramerateRange(10000, 20000),
getClosestSupportedFramerateRange(
Arrays.asList(new FramerateRange(15000, 15000),
new FramerateRange(10000, 20000),
new FramerateRange(10000, 30000)),
new FramerateRange(10000, 20000), new FramerateRange(10000, 30000)),
10 /* requestedFps */));
}
}

@@ -33,8 +33,7 @@ public class CallSessionFileRotatingLogSink {
}
}

private static native long nativeAddSink(
String dirPath, int maxFileSize, int severity);
private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
private static native void nativeDeleteSink(long nativeSink);
private static native byte[] nativeGetLogData(String dirPath);
}

@@ -19,20 +19,20 @@ import java.util.List;
public class Camera1Capturer extends CameraCapturer {
private final boolean captureToTexture;

public Camera1Capturer(String cameraName, CameraEventsHandler eventsHandler,
boolean captureToTexture) {
public Camera1Capturer(
String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));

this.captureToTexture = captureToTexture;
}

@Override
protected void createCameraSession(
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
String cameraName, int width, int height, int framerate) {
Camera1Session.create(
createSessionCallback, events, captureToTexture, applicationContext, surfaceTextureHelper,
Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
int framerate) {
Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
framerate);
}
}

@@ -70,8 +70,8 @@ public class Camera1Enumerator implements CameraEnumerator {
}

@Override
public CameraVideoCapturer createCapturer(String deviceName,
CameraVideoCapturer.CameraEventsHandler eventsHandler) {
public CameraVideoCapturer createCapturer(
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new VideoCapturerAndroid(deviceName, eventsHandler, captureToTexture);
}

@@ -181,7 +181,6 @@ public class Camera1Enumerator implements CameraEnumerator {

String facing =
(info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
return "Camera " + index + ", Facing " + facing
+ ", Orientation " + info.orientation;
return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
}
}

@@ -34,7 +34,7 @@ public class Camera1Session implements CameraSession {
private static final Histogram camera1StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);

private static enum SessionState { RUNNING, STOPPED };
private static enum SessionState { RUNNING, STOPPED }

private final Handler cameraThreadHandler;
private final Events events;
@@ -54,11 +54,10 @@ public class Camera1Session implements CameraSession {
private SessionState state;
private boolean firstFrameReported = false;

public static void create(
final CreateSessionCallback callback, final Events events,
public static void create(final CreateSessionCallback callback, final Events events,
final boolean captureToTexture, final Context applicationContext,
final SurfaceTextureHelper surfaceTextureHelper,
final int cameraId, final int width, final int height, final int framerate) {
final SurfaceTextureHelper surfaceTextureHelper, final int cameraId, final int width,
final int height, final int framerate) {
final long constructionTimeNs = System.nanoTime();
Logging.d(TAG, "Open camera " + cameraId);
events.onCameraOpening();
@@ -83,8 +82,8 @@ public class Camera1Session implements CameraSession {
android.hardware.Camera.getCameraInfo(cameraId, info);

final android.hardware.Camera.Parameters parameters = camera.getParameters();
final CaptureFormat captureFormat = findClosestCaptureFormat(
parameters, width, height, framerate);
final CaptureFormat captureFormat =
findClosestCaptureFormat(parameters, width, height, framerate);
final Size pictureSize = findClosestPictureSize(parameters, width, height);

updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
@@ -101,10 +100,9 @@ public class Camera1Session implements CameraSession {
// Calculate orientation manually and send it as CVO insted.
camera.setDisplayOrientation(0 /* degrees */);

callback.onDone(new Camera1Session(
events, captureToTexture, applicationContext, surfaceTextureHelper,
cameraId, width, height, framerate,
camera, info, captureFormat, constructionTimeNs));
callback.onDone(
new Camera1Session(events, captureToTexture, applicationContext, surfaceTextureHelper,
cameraId, width, height, framerate, camera, info, captureFormat, constructionTimeNs));
}

private static void updateCameraParameters(android.hardware.Camera camera,
@@ -136,27 +134,22 @@ public class Camera1Session implements CameraSession {
Logging.d(TAG, "Available fps ranges: " + supportedFramerates);

final CaptureFormat.FramerateRange fpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(
supportedFramerates, framerate);
CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);

final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()),
width, height);
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);

return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
}

private static Size findClosestPictureSize(android.hardware.Camera.Parameters parameters,
int width, int height) {
private static Size findClosestPictureSize(
android.hardware.Camera.Parameters parameters, int width, int height) {
return CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()),
width, height);
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
}

private Camera1Session(
Events events, boolean captureToTexture,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
int cameraId, int width, int height, int framerate,
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, int cameraId, int width, int height, int framerate,
android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
CaptureFormat captureFormat, long constructionTimeNs) {
Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
@@ -186,8 +179,7 @@ public class Camera1Session implements CameraSession {
final long stopStartTime = System.nanoTime();
state = SessionState.STOPPED;
stopInternal();
final int stopTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera1StopTimeMsHistogram.addSample(stopTimeMs);
}
}
@@ -312,9 +304,8 @@ public class Camera1Session implements CameraSession {
private int getDeviceOrientation() {
int orientation = 0;

WindowManager wm = (WindowManager) applicationContext.getSystemService(
Context.WINDOW_SERVICE);
switch(wm.getDefaultDisplay().getRotation()) {
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;

@@ -27,14 +27,11 @@ public class Camera2Capturer extends CameraCapturer {
}

@Override
protected void createCameraSession(
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
String cameraName, int width, int height, int framerate) {
Camera2Session.create(
createSessionCallback, events,
applicationContext, cameraManager,
surfaceTextureHelper,
cameraName, width, height, framerate);
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
int framerate) {
Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
surfaceTextureHelper, cameraName, width, height, framerate);
}
}

@ -63,8 +63,7 @@ public class Camera2Enumerator implements CameraEnumerator {
|
||||
|
||||
@Override
|
||||
public boolean isFrontFacing(String deviceName) {
|
||||
CameraCharacteristics characteristics
|
||||
= getCameraCharacteristics(deviceName);
|
||||
CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
|
||||
|
||||
return characteristics != null
|
||||
&& characteristics.get(CameraCharacteristics.LENS_FACING)
|
||||
@ -73,8 +72,7 @@ public class Camera2Enumerator implements CameraEnumerator {
|
||||
|
||||
@Override
|
||||
public boolean isBackFacing(String deviceName) {
|
||||
CameraCharacteristics characteristics
|
||||
= getCameraCharacteristics(deviceName);
|
||||
CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
|
||||
|
||||
return characteristics != null
|
||||
&& characteristics.get(CameraCharacteristics.LENS_FACING)
|
||||
@ -87,8 +85,8 @@ public class Camera2Enumerator implements CameraEnumerator {
|
||||
}
|
||||
|
||||
@Override
|
||||
public CameraVideoCapturer createCapturer(String deviceName,
|
||||
CameraVideoCapturer.CameraEventsHandler eventsHandler) {
|
||||
public CameraVideoCapturer createCapturer(
|
||||
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
|
||||
return new Camera2Capturer(context, deviceName, eventsHandler);
|
||||
}
|
||||
|
||||
@ -149,8 +147,7 @@ public class Camera2Enumerator implements CameraEnumerator {
|
||||
return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
|
||||
}
|
||||
|
||||
static List<Size> getSupportedSizes(
|
||||
CameraCharacteristics cameraCharacteristics) {
|
||||
static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
|
||||
final StreamConfigurationMap streamMap =
|
||||
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
|
||||
final int supportLevel =
|
||||
@ -161,8 +158,8 @@ public class Camera2Enumerator implements CameraEnumerator {
|
||||
|
||||
// Video may be stretched pre LMR1 on legacy implementations.
|
||||
// Filter out formats that have different aspect ratio than the sensor array.
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1 &&
|
||||
supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
|
||||
&& supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
|
||||
final Rect activeArraySize =
|
||||
cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
|
||||
final ArrayList<Size> filteredSizes = new ArrayList<Size>();
|
||||
@ -184,8 +181,7 @@ public class Camera2Enumerator implements CameraEnumerator {
|
||||
(CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
|
||||
}
|
||||
|
||||
static List<CaptureFormat> getSupportedFormats(
|
||||
CameraManager cameraManager, String cameraId) {
|
||||
static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
|
||||
synchronized (cachedSupportedFormats) {
|
||||
if (cachedSupportedFormats.containsKey(cameraId)) {
|
||||
return cachedSupportedFormats.get(cameraId);
|
||||
@ -220,8 +216,8 @@ public class Camera2Enumerator implements CameraEnumerator {
|
||||
for (Size size : sizes) {
|
||||
long minFrameDurationNs = 0;
|
||||
try {
|
||||
minFrameDurationNs = streamMap.getOutputMinFrameDuration(SurfaceTexture.class,
|
||||
new android.util.Size(size.width, size.height));
|
||||
minFrameDurationNs = streamMap.getOutputMinFrameDuration(
|
||||
SurfaceTexture.class, new android.util.Size(size.width, size.height));
|
||||
} catch (Exception e) {
|
||||
// getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
|
||||
}
|
||||
@ -255,8 +251,7 @@ public class Camera2Enumerator implements CameraEnumerator {
|
||||
final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
|
||||
for (Range<Integer> range : arrayRanges) {
|
||||
ranges.add(new CaptureFormat.FramerateRange(
|
||||
range.getLower() * unitFactor,
|
||||
range.getUpper() * unitFactor));
|
||||
range.getLower() * unitFactor, range.getUpper() * unitFactor));
|
||||
}
|
||||
return ranges;
|
||||
}
|
||||
|
||||
@ -42,7 +42,7 @@ public class Camera2Session implements CameraSession {
|
||||
private static final Histogram camera2StopTimeMsHistogram =
|
||||
Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
|
||||
|
||||
private static enum SessionState { RUNNING, STOPPED };
|
||||
private static enum SessionState { RUNNING, STOPPED }
|
||||
|
||||
private final Handler cameraThreadHandler;
|
||||
private final CreateSessionCallback callback;
|
||||
@ -159,11 +159,11 @@ public class Camera2Session implements CameraSession {
|
||||
final CaptureRequest.Builder captureRequestBuilder =
|
||||
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
|
||||
// Set auto exposure fps range.
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(
|
||||
captureFormat.framerate.min / fpsUnitFactor,
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
|
||||
new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
|
||||
captureFormat.framerate.max / fpsUnitFactor));
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
|
||||
CaptureRequest.CONTROL_AE_MODE_ON);
|
||||
captureRequestBuilder.set(
|
||||
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
|
||||
chooseStabilizationMode(captureRequestBuilder);
|
||||
chooseFocusMode(captureRequestBuilder);
|
||||
@ -205,8 +205,8 @@ public class Camera2Session implements CameraSession {
|
||||
}
|
||||
|
||||
// Undo camera orientation - we report it as rotation instead.
|
||||
transformMatrix = RendererCommon.rotateTextureMatrix(
|
||||
transformMatrix, -cameraOrientation);
|
||||
transformMatrix =
|
||||
RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
|
||||
|
||||
events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
|
||||
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
|
||||
@ -250,12 +250,12 @@ public class Camera2Session implements CameraSession {
|
||||
}
|
||||
|
||||
private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
|
||||
final int[] availableFocusModes = cameraCharacteristics.get(
|
||||
CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
|
||||
final int[] availableFocusModes =
|
||||
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
|
||||
for (int mode : availableFocusModes) {
|
||||
if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
|
||||
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
|
||||
captureRequestBuilder.set(
|
||||
CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
|
||||
Logging.d(TAG, "Using continuous video auto-focus.");
|
||||
return;
|
||||
}
|
||||
@ -272,23 +272,17 @@ public class Camera2Session implements CameraSession {
|
||||
}
|
||||
}
|
||||
|
||||
public static void create(
|
||||
CreateSessionCallback callback, Events events,
|
||||
public static void create(CreateSessionCallback callback, Events events,
|
||||
Context applicationContext, CameraManager cameraManager,
|
||||
SurfaceTextureHelper surfaceTextureHelper,
|
||||
String cameraId, int width, int height, int framerate) {
|
||||
new Camera2Session(
|
||||
callback, events,
|
||||
applicationContext, cameraManager,
|
||||
surfaceTextureHelper,
|
||||
SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
|
||||
int framerate) {
|
||||
new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
|
||||
cameraId, width, height, framerate);
|
||||
}
|
||||
|
||||
private Camera2Session(
|
||||
CreateSessionCallback callback, Events events,
|
||||
Context applicationContext, CameraManager cameraManager,
|
||||
SurfaceTextureHelper surfaceTextureHelper,
|
||||
String cameraId, int width, int height, int framerate) {
|
||||
private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
|
||||
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
|
||||
int width, int height, int framerate) {
|
||||
Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
|
||||
|
||||
constructionTimeNs = System.nanoTime();
|
||||
@ -341,11 +335,9 @@ public class Camera2Session implements CameraSession {
|
||||
}
|
||||
|
||||
final CaptureFormat.FramerateRange bestFpsRange =
|
||||
CameraEnumerationAndroid.getClosestSupportedFramerateRange(
|
||||
framerateRanges, framerate);
|
||||
CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
|
||||
|
||||
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(
|
||||
sizes, width, height);
|
||||
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
|
||||
|
||||
captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
|
||||
Logging.d(TAG, "Using capture format: " + captureFormat);
|
||||
@ -372,8 +364,7 @@ public class Camera2Session implements CameraSession {
|
||||
final long stopStartTime = System.nanoTime();
|
||||
state = SessionState.STOPPED;
|
||||
stopInternal();
|
||||
final int stopTimeMs =
|
||||
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
|
||||
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
|
||||
camera2StopTimeMsHistogram.addSample(stopTimeMs);
|
||||
}
|
||||
}
|
||||
@ -417,9 +408,8 @@ public class Camera2Session implements CameraSession {
|
||||
private int getDeviceOrientation() {
|
||||
int orientation = 0;
|
||||
|
||||
WindowManager wm = (WindowManager) applicationContext.getSystemService(
|
||||
Context.WINDOW_SERVICE);
|
||||
switch(wm.getDefaultDisplay().getRotation()) {
|
||||
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
|
||||
switch (wm.getDefaultDisplay().getRotation()) {
|
||||
case Surface.ROTATION_90:
|
||||
orientation = 90;
|
||||
break;
|
||||
|
||||
@ -50,8 +50,7 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
|
||||
|
||||
if (switchState == SwitchState.IN_PROGRESS) {
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchDone(
|
||||
cameraEnumerator.isFrontFacing(cameraName));
|
||||
switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
|
||||
switchEventsHandler = null;
|
||||
}
|
||||
switchState = SwitchState.IDLE;
|
||||
@ -133,8 +132,7 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
|
||||
|
||||
@Override
|
||||
public void onByteBufferFrameCaptured(
|
||||
CameraSession session, byte[] data, int width, int height, int rotation,
|
||||
long timestamp) {
|
||||
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession) {
|
||||
@ -151,9 +149,8 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onTextureFrameCaptured(
|
||||
CameraSession session, int width, int height, int oesTextureId, float[] transformMatrix,
|
||||
int rotation, long timestamp) {
|
||||
public void onTextureFrameCaptured(CameraSession session, int width, int height,
|
||||
int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession) {
|
||||
@ -268,9 +265,8 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
|
||||
cameraThreadHandler.postDelayed(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
createCameraSession(
|
||||
createSessionCallback, cameraSessionEventsHandler, applicationContext, surfaceHelper,
|
||||
cameraName, width, height, framerate);
|
||||
createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
|
||||
surfaceHelper, cameraName, width, height, framerate);
|
||||
}
|
||||
}, delayMs);
|
||||
}
|
||||
@ -285,7 +281,6 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
|
||||
ThreadUtils.waitUninterruptibly(stateLock);
|
||||
}
|
||||
|
||||
|
||||
if (currentSession != null) {
|
||||
Logging.d(TAG, "Stop capture: Nulling session");
|
||||
cameraStatistics.release();
|
||||
@ -428,6 +423,6 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
|
||||
|
||||
abstract protected void createCameraSession(
|
||||
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
|
||||
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
|
||||
String cameraName, int width, int height, int framerate);
|
||||
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
|
||||
int width, int height, int framerate);
|
||||
}
|
||||
|
||||
@ -123,7 +123,6 @@ public class CameraEnumerationAndroid {
|
||||
return new Camera1Enumerator().getDeviceNames();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
* Please use Camera1Enumerator.getDeviceNames().length instead.
|
||||
@ -177,8 +176,8 @@ public class CameraEnumerationAndroid {
|
||||
// lower bound, to allow the framerate to fluctuate based on lightning conditions.
|
||||
public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
|
||||
List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
|
||||
return Collections.min(supportedFramerates,
|
||||
new ClosestComparator<CaptureFormat.FramerateRange>() {
|
||||
return Collections.min(
|
||||
supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
|
||||
// Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
|
||||
// from requested.
|
||||
private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
|
||||
@ -192,15 +191,14 @@ public class CameraEnumerationAndroid {
|
||||
|
||||
// Use one weight for small |value| less than |threshold|, and another weight above.
|
||||
private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
|
||||
return (value < threshold)
|
||||
? value * lowWeight
|
||||
return (value < threshold) ? value * lowWeight
|
||||
: threshold * lowWeight + (value - threshold) * highWeight;
|
||||
}
|
||||
|
||||
@Override
|
||||
int diff(CaptureFormat.FramerateRange range) {
|
||||
final int minFpsError = progressivePenalty(range.min,
|
||||
MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
|
||||
final int minFpsError = progressivePenalty(
|
||||
range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
|
||||
final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
|
||||
MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
|
||||
return minFpsError + maxFpsError;
|
||||
@ -209,10 +207,8 @@ public class CameraEnumerationAndroid {
|
||||
}
|
||||
|
||||
public static Size getClosestSupportedSize(
|
||||
List<Size> supportedSizes, final int requestedWidth,
|
||||
final int requestedHeight) {
|
||||
return Collections.min(supportedSizes,
|
||||
new ClosestComparator<Size>() {
|
||||
List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
|
||||
return Collections.min(supportedSizes, new ClosestComparator<Size>() {
|
||||
@Override
|
||||
int diff(Size size) {
|
||||
return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
|
||||
|
||||
@ -20,6 +20,6 @@ public interface CameraEnumerator {
|
||||
public boolean isBackFacing(String deviceName);
|
||||
public List<CaptureFormat> getSupportedFormats(String deviceName);
|
||||
|
||||
public CameraVideoCapturer createCapturer(String deviceName,
|
||||
CameraVideoCapturer.CameraEventsHandler eventsHandler);
|
||||
public CameraVideoCapturer createCapturer(
|
||||
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
|
||||
}
|
||||
|
||||
@ -22,8 +22,8 @@ public interface CameraSession {
|
||||
void onCameraOpening();
|
||||
void onCameraError(CameraSession session, String error);
|
||||
void onCameraClosed(CameraSession session);
|
||||
void onByteBufferFrameCaptured(CameraSession session, byte[] data, int width, int height,
|
||||
int rotation, long timestamp);
|
||||
void onByteBufferFrameCaptured(
|
||||
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
|
||||
void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
|
||||
float[] transformMatrix, int rotation, long timestamp);
|
||||
}
|
||||
|
||||
@ -75,7 +75,7 @@ public interface CameraVideoCapturer extends VideoCapturer {
|
||||
@Override
|
||||
public void run() {
|
||||
final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
|
||||
Logging.d(TAG, "Camera fps: " + cameraFps +".");
|
||||
Logging.d(TAG, "Camera fps: " + cameraFps + ".");
|
||||
if (frameCount == 0) {
|
||||
++freezePeriodCount;
|
||||
if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS
|
||||
|
||||
@ -29,9 +29,8 @@ public class DataChannel {
|
||||
public Init() {}
|
||||
|
||||
// Called only by native code.
|
||||
private Init(
|
||||
boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
|
||||
String protocol, boolean negotiated, int id) {
|
||||
private Init(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits, String protocol,
|
||||
boolean negotiated, int id) {
|
||||
this.ordered = ordered;
|
||||
this.maxRetransmitTimeMs = maxRetransmitTimeMs;
|
||||
this.maxRetransmits = maxRetransmits;
|
||||
@ -73,7 +72,7 @@ public class DataChannel {
|
||||
}
|
||||
|
||||
/** Keep in sync with DataChannelInterface::DataState. */
|
||||
public enum State { CONNECTING, OPEN, CLOSING, CLOSED };
|
||||
public enum State { CONNECTING, OPEN, CLOSING, CLOSED }
|
||||
|
||||
private final long nativeDataChannel;
|
||||
private long nativeObserver;
|
||||
|
||||
@ -15,15 +15,13 @@ import android.view.Surface;
|
||||
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
|
||||
|
||||
/**
|
||||
* Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
|
||||
* and an EGLSurface.
|
||||
*/
|
||||
public abstract class EglBase {
|
||||
// EGL wrapper for an actual EGLContext.
|
||||
public static class Context {
|
||||
}
|
||||
public static class Context {}
|
||||
|
||||
// According to the documentation, EGL can be used from multiple threads at the same time if each
|
||||
// thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
|
||||
@ -39,6 +37,7 @@ public abstract class EglBase {
|
||||
// Android-specific extension.
|
||||
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
|
||||
|
||||
// clang-format off
|
||||
public static final int[] CONFIG_PLAIN = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
@ -79,6 +78,7 @@ public abstract class EglBase {
|
||||
EGL_RECORDABLE_ANDROID, 1,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
// clang-format on
|
||||
|
||||
// Create a new context with the specified config attributes, sharing data with sharedContext.
|
||||
// |sharedContext| can be null.
|
||||
|
||||
@ -159,9 +159,8 @@ public final class EglBase10 extends EglBase {
|
||||
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
|
||||
eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create pixel buffer surface with size " + width + "x" + height
|
||||
+ ": 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
|
||||
+ height + ": 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -272,8 +271,7 @@ public final class EglBase10 extends EglBase {
|
||||
private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
|
||||
EGLConfig[] configs = new EGLConfig[1];
|
||||
int[] numConfigs = new int[1];
|
||||
if (!egl.eglChooseConfig(
|
||||
eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
|
||||
if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
|
||||
throw new RuntimeException(
|
||||
"eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
|
||||
@ -37,8 +37,8 @@ public final class EglBase14 extends EglBase {
|
||||
// EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation
|
||||
// time stamp on a surface is supported from 18 so we require 18.
|
||||
public static boolean isEGL14Supported() {
|
||||
Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
|
||||
+ ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
|
||||
Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION + ". isEGL14Supported: "
|
||||
+ (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
|
||||
return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
|
||||
}
|
||||
|
||||
@ -101,9 +101,8 @@ public final class EglBase14 extends EglBase {
|
||||
int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
|
||||
eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create pixel buffer surface with size " + width + "x" + height
|
||||
+ ": 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
|
||||
+ height + ": 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -202,7 +201,8 @@ public final class EglBase14 extends EglBase {
|
||||
throw new RuntimeException("No EGLSurface - can't swap buffers");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
// See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
|
||||
// See
|
||||
// https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
|
||||
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
|
||||
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
|
||||
@ -25,6 +25,7 @@ import java.util.Map;
|
||||
* manually to free the resources held by this object.
|
||||
*/
|
||||
public class GlRectDrawer implements RendererCommon.GlDrawer {
|
||||
// clang-format off
|
||||
// Simple vertex shader, used for both YUV and OES.
|
||||
private static final String VERTEX_SHADER_STRING =
|
||||
"varying vec2 interp_tc;\n"
|
||||
@ -76,11 +77,11 @@ public class GlRectDrawer implements RendererCommon.GlDrawer {
|
||||
+ "void main() {\n"
|
||||
+ " gl_FragColor = texture2D(oes_tex, interp_tc);\n"
|
||||
+ "}\n";
|
||||
// clang-format on
|
||||
|
||||
// Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
|
||||
// top-right.
|
||||
private static final FloatBuffer FULL_RECTANGLE_BUF =
|
||||
GlUtil.createFloatBuffer(new float[] {
|
||||
private static final FloatBuffer FULL_RECTANGLE_BUF = GlUtil.createFloatBuffer(new float[] {
|
||||
-1.0f, -1.0f, // Bottom left.
|
||||
1.0f, -1.0f, // Bottom right.
|
||||
-1.0f, 1.0f, // Top left.
|
||||
@ -88,8 +89,7 @@ public class GlRectDrawer implements RendererCommon.GlDrawer {
|
||||
});
|
||||
|
||||
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
|
||||
private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
|
||||
GlUtil.createFloatBuffer(new float[] {
|
||||
private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = GlUtil.createFloatBuffer(new float[] {
|
||||
0.0f, 0.0f, // Bottom left.
|
||||
1.0f, 0.0f, // Bottom right.
|
||||
0.0f, 1.0f, // Top left.
|
||||
|
||||
@ -25,13 +25,11 @@ public class GlShader {
|
||||
}
|
||||
GLES20.glShaderSource(shader, source);
|
||||
GLES20.glCompileShader(shader);
|
||||
int[] compileStatus = new int[] {
|
||||
GLES20.GL_FALSE
|
||||
};
|
||||
int[] compileStatus = new int[] {GLES20.GL_FALSE};
|
||||
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
|
||||
if (compileStatus[0] != GLES20.GL_TRUE) {
|
||||
Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
|
||||
GLES20.glGetShaderInfoLog(shader));
|
||||
Logging.e(
|
||||
TAG, "Could not compile shader " + shaderType + ":" + GLES20.glGetShaderInfoLog(shader));
|
||||
throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
|
||||
}
|
||||
GlUtil.checkNoGLES2Error("compileShader");
|
||||
@ -50,13 +48,10 @@ public class GlShader {
|
||||
GLES20.glAttachShader(program, vertexShader);
|
||||
GLES20.glAttachShader(program, fragmentShader);
|
||||
GLES20.glLinkProgram(program);
|
||||
int[] linkStatus = new int[] {
|
||||
GLES20.GL_FALSE
|
||||
};
|
||||
int[] linkStatus = new int[] {GLES20.GL_FALSE};
|
||||
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
|
||||
if (linkStatus[0] != GLES20.GL_TRUE) {
|
||||
Logging.e(TAG, "Could not link program: " +
|
||||
GLES20.glGetProgramInfoLog(program));
|
||||
Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
|
||||
throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
|
||||
}
|
||||
// According to the documentation of glLinkProgram():
|
||||
|
||||
@ -52,8 +52,8 @@ public class GlTextureFrameBuffer {
|
||||
GlUtil.checkNoGLES2Error("Generate framebuffer");
|
||||
|
||||
// Attach the texture to the framebuffer as color attachment.
|
||||
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
|
||||
GLES20.GL_TEXTURE_2D, textureId, 0);
|
||||
GLES20.glFramebufferTexture2D(
|
||||
GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
|
||||
GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
|
||||
|
||||
// Restore normal framebuffer.
|
||||
|
||||
@ -42,11 +42,7 @@ public class MediaCodecVideoDecoder {
|
||||
private static final long MAX_DECODE_TIME_MS = 200;
|
||||
|
||||
// Tracks webrtc::VideoCodecType.
|
||||
public enum VideoCodecType {
|
||||
VIDEO_CODEC_VP8,
|
||||
VIDEO_CODEC_VP9,
|
||||
VIDEO_CODEC_H264
|
||||
}
|
||||
public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
|
||||
|
||||
// Timeout for input buffer dequeue.
|
||||
private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
|
||||
@ -70,14 +66,13 @@ public class MediaCodecVideoDecoder {
|
||||
private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
|
||||
private static final String H264_MIME_TYPE = "video/avc";
|
||||
// List of supported HW VP8 decoders.
|
||||
private static final String[] supportedVp8HwCodecPrefixes =
|
||||
{"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
|
||||
private static final String[] supportedVp8HwCodecPrefixes = {
|
||||
"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel."};
|
||||
// List of supported HW VP9 decoders.
|
||||
private static final String[] supportedVp9HwCodecPrefixes =
|
||||
{"OMX.qcom.", "OMX.Exynos." };
|
||||
private static final String[] supportedVp9HwCodecPrefixes = {"OMX.qcom.", "OMX.Exynos."};
|
||||
// List of supported HW H.264 decoders.
|
||||
private static final String[] supportedH264HwCodecPrefixes =
|
||||
{"OMX.qcom.", "OMX.Intel.", "OMX.Exynos." };
|
||||
private static final String[] supportedH264HwCodecPrefixes = {
|
||||
"OMX.qcom.", "OMX.Intel.", "OMX.Exynos."};
|
||||
|
||||
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
|
||||
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
|
||||
@ -87,11 +82,9 @@ public class MediaCodecVideoDecoder {
|
||||
private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
|
||||
// Allowable color formats supported by codec - in order of preference.
|
||||
private static final List<Integer> supportedColorList = Arrays.asList(
|
||||
CodecCapabilities.COLOR_FormatYUV420Planar,
|
||||
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
|
||||
CodecCapabilities.COLOR_FormatYUV420Planar, CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
|
||||
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
|
||||
COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka,
|
||||
COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
|
||||
COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
|
||||
COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
|
||||
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
|
||||
|
||||
@ -108,8 +101,8 @@ public class MediaCodecVideoDecoder {
|
||||
private TextureListener textureListener;
|
||||
private int droppedFrames;
|
||||
private Surface surface = null;
|
||||
private final Queue<DecodedOutputBuffer>
|
||||
dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
|
||||
private final Queue<DecodedOutputBuffer> dequeuedSurfaceOutputBuffers =
|
||||
new LinkedList<DecodedOutputBuffer>();
|
||||
|
||||
// MediaCodec error handler - invoked when critical error happens which may prevent
|
||||
// further use of media codec API. Now it means that one of media codec instances
|
||||
@ -142,18 +135,18 @@ public class MediaCodecVideoDecoder {
|
||||
|
||||
// Functions to query if HW decoding is supported.
|
||||
public static boolean isVp8HwSupported() {
|
||||
return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) &&
|
||||
(findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
|
||||
return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE)
|
||||
&& (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
|
||||
}
|
||||
|
||||
public static boolean isVp9HwSupported() {
|
||||
return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) &&
|
||||
(findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
|
||||
return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE)
|
||||
&& (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
|
||||
}
|
||||
|
||||
public static boolean isH264HwSupported() {
|
||||
return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) &&
|
||||
(findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
|
||||
return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE)
|
||||
&& (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
|
||||
}
|
||||
|
||||
public static void printStackTrace() {
|
||||
@ -178,8 +171,7 @@ public class MediaCodecVideoDecoder {
|
||||
public final int colorFormat; // Color format supported by codec.
|
||||
}
|
||||
|
||||
private static DecoderProperties findDecoder(
|
||||
String mime, String[] supportedCodecPrefixes) {
|
||||
private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
|
||||
return null; // MediaCodec.setParameters is missing.
|
||||
}
|
||||
@ -233,8 +225,8 @@ public class MediaCodecVideoDecoder {
|
||||
for (int codecColorFormat : capabilities.colorFormats) {
|
||||
if (codecColorFormat == supportedColorFormat) {
|
||||
// Found supported HW decoder.
|
||||
Logging.d(TAG, "Found target decoder " + name +
|
||||
". Color: 0x" + Integer.toHexString(codecColorFormat));
|
||||
Logging.d(TAG, "Found target decoder " + name + ". Color: 0x"
|
||||
+ Integer.toHexString(codecColorFormat));
|
||||
return new DecoderProperties(name, codecColorFormat);
|
||||
}
|
||||
}
|
||||
@ -246,16 +238,14 @@ public class MediaCodecVideoDecoder {
|
||||
|
||||
private void checkOnMediaCodecThread() throws IllegalStateException {
|
||||
if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
|
||||
throw new IllegalStateException(
|
||||
"MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
|
||||
" but is now called on " + Thread.currentThread());
|
||||
throw new IllegalStateException("MediaCodecVideoDecoder previously operated on "
|
||||
+ mediaCodecThread + " but is now called on " + Thread.currentThread());
|
||||
}
|
||||
}
|
||||
|
||||
// Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
|
||||
private boolean initDecode(
|
||||
VideoCodecType type, int width, int height,
|
||||
SurfaceTextureHelper surfaceTextureHelper) {
|
||||
VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
|
||||
if (mediaCodecThread != null) {
|
||||
throw new RuntimeException("initDecode: Forgot to release()?");
|
||||
}
|
||||
@ -280,9 +270,8 @@ public class MediaCodecVideoDecoder {
|
||||
throw new RuntimeException("Cannot find HW decoder for " + type);
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
|
||||
". Color: 0x" + Integer.toHexString(properties.colorFormat) +
|
||||
". Use Surface: " + useSurface);
|
||||
Logging.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height + ". Color: 0x"
|
||||
+ Integer.toHexString(properties.colorFormat) + ". Use Surface: " + useSurface);
|
||||
|
||||
runningInstance = this; // Decoder is now running and can be queried for stack traces.
|
||||
mediaCodecThread = Thread.currentThread();
|
||||
@ -317,8 +306,8 @@ public class MediaCodecVideoDecoder {
|
||||
hasDecodedFirstFrame = false;
|
||||
dequeuedSurfaceOutputBuffers.clear();
|
||||
droppedFrames = 0;
|
||||
Logging.d(TAG, "Input buffers: " + inputBuffers.length +
|
||||
". Output buffers: " + outputBuffers.length);
|
||||
Logging.d(TAG,
|
||||
"Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
|
||||
return true;
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "initDecode failed", e);
|
||||
@ -406,12 +395,11 @@ public class MediaCodecVideoDecoder {
|
||||
try {
|
||||
inputBuffers[inputBufferIndex].position(0);
|
||||
inputBuffers[inputBufferIndex].limit(size);
|
||||
decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
|
||||
ntpTimeStamp));
|
||||
decodeStartTimeMs.add(
|
||||
new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, ntpTimeStamp));
|
||||
mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStamUs, 0);
|
||||
return true;
|
||||
}
|
||||
catch (IllegalStateException e) {
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "decode failed", e);
|
||||
return false;
|
||||
}
|
||||
@ -511,8 +499,7 @@ public class MediaCodecVideoDecoder {
|
||||
|
||||
public void addBufferToRender(DecodedOutputBuffer buffer) {
|
||||
if (bufferToRender != null) {
|
||||
Logging.e(TAG,
|
||||
"Unexpected addBufferToRender() called while waiting for a texture.");
|
||||
Logging.e(TAG, "Unexpected addBufferToRender() called while waiting for a texture.");
|
||||
throw new IllegalStateException("Waiting for a texture.");
|
||||
}
|
||||
bufferToRender = buffer;
|
||||
@ -530,8 +517,8 @@ public class MediaCodecVideoDecoder {
|
||||
int oesTextureId, float[] transformMatrix, long timestampNs) {
|
||||
synchronized (newFrameLock) {
|
||||
if (renderedBuffer != null) {
|
||||
Logging.e(TAG,
|
||||
"Unexpected onTextureFrameAvailable() called while already holding a texture.");
|
||||
Logging.e(
|
||||
TAG, "Unexpected onTextureFrameAvailable() called while already holding a texture.");
|
||||
throw new IllegalStateException("Already holding a texture.");
|
||||
}
|
||||
// |timestampNs| is always zero on some Android versions.
|
||||
@ -550,7 +537,7 @@ public class MediaCodecVideoDecoder {
|
||||
if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
|
||||
try {
|
||||
newFrameLock.wait(timeoutMs);
|
||||
} catch(InterruptedException e) {
|
||||
} catch (InterruptedException e) {
|
||||
// Restore the interrupted status by reinterrupting the thread.
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
@ -588,8 +575,8 @@ public class MediaCodecVideoDecoder {
|
||||
// MediaCodec.INFO_TRY_AGAIN_LATER.
|
||||
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
|
||||
while (true) {
|
||||
final int result = mediaCodec.dequeueOutputBuffer(
|
||||
info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
|
||||
final int result =
|
||||
mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
|
||||
switch (result) {
|
||||
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
|
||||
outputBuffers = mediaCodec.getOutputBuffers();
|
||||
@ -604,8 +591,8 @@ public class MediaCodecVideoDecoder {
|
||||
int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
|
||||
int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
|
||||
if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
|
||||
throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
|
||||
height + ". New " + new_width + "*" + new_height);
|
||||
throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height
|
||||
+ ". New " + new_width + "*" + new_height);
|
||||
}
|
||||
width = format.getInteger(MediaFormat.KEY_WIDTH);
|
||||
height = format.getInteger(MediaFormat.KEY_HEIGHT);
|
||||
@ -639,14 +626,9 @@ public class MediaCodecVideoDecoder {
|
||||
+ ". Might be caused by resuming H264 decoding after a pause.");
|
||||
decodeTimeMs = MAX_DECODE_TIME_MS;
|
||||
}
|
||||
return new DecodedOutputBuffer(result,
|
||||
info.offset,
|
||||
info.size,
|
||||
TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs),
|
||||
timeStamps.timeStampMs,
|
||||
timeStamps.ntpTimeStampMs,
|
||||
decodeTimeMs,
|
||||
SystemClock.elapsedRealtime());
|
||||
return new DecodedOutputBuffer(result, info.offset, info.size,
|
||||
TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs,
|
||||
timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime());
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -686,18 +668,17 @@ public class MediaCodecVideoDecoder {
|
||||
// TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
|
||||
// return the one and only texture even if it does not render.
|
||||
Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
|
||||
+ droppedFrame.presentationTimeStampMs +
|
||||
". Total number of dropped frames: " + droppedFrames);
|
||||
+ droppedFrame.presentationTimeStampMs + ". Total number of dropped frames: "
|
||||
+ droppedFrames);
|
||||
} else {
|
||||
Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() +
|
||||
". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs +
|
||||
". Total number of dropped frames: " + droppedFrames);
|
||||
Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size()
|
||||
+ ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs
|
||||
+ ". Total number of dropped frames: " + droppedFrames);
|
||||
}
|
||||
|
||||
mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
|
||||
return new DecodedTextureBuffer(0, null,
|
||||
droppedFrame.presentationTimeStampMs, droppedFrame.timeStampMs,
|
||||
droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
|
||||
return new DecodedTextureBuffer(0, null, droppedFrame.presentationTimeStampMs,
|
||||
droppedFrame.timeStampMs, droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
|
||||
SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
|
||||
}
|
||||
return null;
|
||||
|
||||
@ -42,11 +42,7 @@ public class MediaCodecVideoEncoder {
|
||||
private static final String TAG = "MediaCodecVideoEncoder";
|
||||
|
||||
// Tracks webrtc::VideoCodecType.
|
||||
public enum VideoCodecType {
|
||||
VIDEO_CODEC_VP8,
|
||||
VIDEO_CODEC_VP9,
|
||||
VIDEO_CODEC_H264
|
||||
}
|
||||
public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
|
||||
|
||||
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
|
||||
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
|
||||
@ -115,54 +111,43 @@ public class MediaCodecVideoEncoder {
|
||||
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
|
||||
private static final MediaCodecProperties exynosVp8HwProperties = new MediaCodecProperties(
|
||||
"OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.DYNAMIC_ADJUSTMENT);
|
||||
private static final MediaCodecProperties[] vp8HwList = new MediaCodecProperties[] {
|
||||
qcomVp8HwProperties, exynosVp8HwProperties
|
||||
};
|
||||
private static final MediaCodecProperties[] vp8HwList =
|
||||
new MediaCodecProperties[] {qcomVp8HwProperties, exynosVp8HwProperties};
|
||||
|
||||
// List of supported HW VP9 encoders.
|
||||
private static final MediaCodecProperties qcomVp9HwProperties = new MediaCodecProperties(
|
||||
"OMX.qcom.", Build.VERSION_CODES.M, BitrateAdjustmentType.NO_ADJUSTMENT);
|
||||
private static final MediaCodecProperties exynosVp9HwProperties = new MediaCodecProperties(
|
||||
"OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.NO_ADJUSTMENT);
|
||||
private static final MediaCodecProperties[] vp9HwList = new MediaCodecProperties[] {
|
||||
qcomVp9HwProperties, exynosVp9HwProperties
|
||||
};
|
||||
private static final MediaCodecProperties[] vp9HwList =
|
||||
new MediaCodecProperties[] {qcomVp9HwProperties, exynosVp9HwProperties};
|
||||
|
||||
// List of supported HW H.264 encoders.
|
||||
private static final MediaCodecProperties qcomH264HwProperties = new MediaCodecProperties(
|
||||
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
|
||||
private static final MediaCodecProperties exynosH264HwProperties = new MediaCodecProperties(
|
||||
"OMX.Exynos.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT);
|
||||
private static final MediaCodecProperties[] h264HwList = new MediaCodecProperties[] {
|
||||
qcomH264HwProperties, exynosH264HwProperties
|
||||
};
|
||||
private static final MediaCodecProperties[] h264HwList =
|
||||
new MediaCodecProperties[] {qcomH264HwProperties, exynosH264HwProperties};
|
||||
|
||||
// List of devices with poor H.264 encoder quality.
|
||||
private static final String[] H264_HW_EXCEPTION_MODELS = new String[] {
|
||||
// HW H.264 encoder on below devices has poor bitrate control - actual
|
||||
// bitrates deviates a lot from the target value.
|
||||
"SAMSUNG-SGH-I337",
|
||||
"Nexus 7",
|
||||
"Nexus 4"
|
||||
};
|
||||
private static final String[] H264_HW_EXCEPTION_MODELS =
|
||||
new String[] {"SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4"};
|
||||
|
||||
// Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
|
||||
// in OMX_Video.h
|
||||
private static final int VIDEO_ControlRateConstant = 2;
|
||||
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
|
||||
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
|
||||
private static final int
|
||||
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
|
||||
private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
|
||||
// Allowable color formats supported by codec - in order of preference.
|
||||
private static final int[] supportedColorList = {
|
||||
CodecCapabilities.COLOR_FormatYUV420Planar,
|
||||
private static final int[] supportedColorList = {CodecCapabilities.COLOR_FormatYUV420Planar,
|
||||
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
|
||||
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
|
||||
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
|
||||
};
|
||||
private static final int[] supportedSurfaceColorList = {
|
||||
CodecCapabilities.COLOR_FormatSurface
|
||||
};
|
||||
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
|
||||
private static final int[] supportedSurfaceColorList = {CodecCapabilities.COLOR_FormatSurface};
|
||||
private VideoCodecType type;
|
||||
private int colorFormat; // Used by native code.
|
||||
|
||||
@ -209,33 +194,33 @@ public class MediaCodecVideoEncoder {
|
||||
|
||||
// Functions to query if HW encoding is supported.
|
||||
public static boolean isVp8HwSupported() {
|
||||
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
|
||||
(findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedColorList) != null);
|
||||
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
|
||||
&& (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedColorList) != null);
|
||||
}
|
||||
|
||||
public static boolean isVp9HwSupported() {
|
||||
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
|
||||
(findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
|
||||
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
|
||||
&& (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
|
||||
}
|
||||
|
||||
public static boolean isH264HwSupported() {
|
||||
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
|
||||
(findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
|
||||
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
|
||||
&& (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
|
||||
}
|
||||
|
||||
public static boolean isVp8HwSupportedUsingTextures() {
|
||||
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
|
||||
(findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedSurfaceColorList) != null);
|
||||
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
|
||||
&& (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedSurfaceColorList) != null);
|
||||
}
|
||||
|
||||
public static boolean isVp9HwSupportedUsingTextures() {
|
||||
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
|
||||
(findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
|
||||
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
|
||||
&& (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
|
||||
}
|
||||
|
||||
public static boolean isH264HwSupportedUsingTextures() {
|
||||
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
|
||||
(findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
|
||||
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
|
||||
&& (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
|
||||
}
|
||||
|
||||
// Helper struct for findHwEncoder() below.
|
||||
@ -296,14 +281,14 @@ public class MediaCodecVideoEncoder {
|
||||
for (MediaCodecProperties codecProperties : supportedHwCodecProperties) {
|
||||
if (name.startsWith(codecProperties.codecPrefix)) {
|
||||
if (Build.VERSION.SDK_INT < codecProperties.minSdk) {
|
||||
Logging.w(TAG, "Codec " + name + " is disabled due to SDK version " +
|
||||
Build.VERSION.SDK_INT);
|
||||
Logging.w(
|
||||
TAG, "Codec " + name + " is disabled due to SDK version " + Build.VERSION.SDK_INT);
|
||||
continue;
|
||||
}
|
||||
if (codecProperties.bitrateAdjustmentType != BitrateAdjustmentType.NO_ADJUSTMENT) {
|
||||
bitrateAdjustmentType = codecProperties.bitrateAdjustmentType;
|
||||
Logging.w(TAG, "Codec " + name
|
||||
+ " requires bitrate adjustment: " + bitrateAdjustmentType);
|
||||
Logging.w(
|
||||
TAG, "Codec " + name + " requires bitrate adjustment: " + bitrateAdjustmentType);
|
||||
}
|
||||
supportedCodec = true;
|
||||
break;
|
||||
@ -329,9 +314,9 @@ public class MediaCodecVideoEncoder {
|
||||
for (int codecColorFormat : capabilities.colorFormats) {
|
||||
if (codecColorFormat == supportedColorFormat) {
|
||||
// Found supported HW encoder.
|
||||
Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name
|
||||
+ ". Color: 0x" + Integer.toHexString(codecColorFormat)
|
||||
+ ". Bitrate adjustment: " + bitrateAdjustmentType);
|
||||
Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name + ". Color: 0x"
|
||||
+ Integer.toHexString(codecColorFormat) + ". Bitrate adjustment: "
|
||||
+ bitrateAdjustmentType);
|
||||
return new EncoderProperties(name, codecColorFormat, bitrateAdjustmentType);
|
||||
}
|
||||
}
|
||||
@ -342,9 +327,8 @@ public class MediaCodecVideoEncoder {
|
||||
|
||||
private void checkOnMediaCodecThread() {
|
||||
if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
|
||||
throw new RuntimeException(
|
||||
"MediaCodecVideoEncoder previously operated on " + mediaCodecThread +
|
||||
" but is now called on " + Thread.currentThread());
|
||||
throw new RuntimeException("MediaCodecVideoEncoder previously operated on " + mediaCodecThread
|
||||
+ " but is now called on " + Thread.currentThread());
|
||||
}
|
||||
}
|
||||
|
||||
@ -373,8 +357,8 @@ public class MediaCodecVideoEncoder {
|
||||
boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
|
||||
EglBase14.Context sharedContext) {
|
||||
final boolean useSurface = sharedContext != null;
|
||||
Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
|
||||
". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
|
||||
Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height + ". @ " + kbps
|
||||
+ " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
|
||||
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
@ -411,9 +395,8 @@ public class MediaCodecVideoEncoder {
|
||||
} else {
|
||||
fps = Math.min(fps, MAXIMUM_INITIAL_FPS);
|
||||
}
|
||||
Logging.d(TAG, "Color format: " + colorFormat +
|
||||
". Bitrate adjustment: " + bitrateAdjustmentType +
|
||||
". Initial fps: " + fps);
|
||||
Logging.d(TAG, "Color format: " + colorFormat + ". Bitrate adjustment: " + bitrateAdjustmentType
|
||||
+ ". Initial fps: " + fps);
|
||||
targetBitrateBps = 1000 * kbps;
|
||||
targetFps = fps;
|
||||
bitrateAccumulatorMax = targetBitrateBps / 8.0;
|
||||
@ -436,8 +419,7 @@ public class MediaCodecVideoEncoder {
|
||||
Logging.e(TAG, "Can not create media encoder");
|
||||
return false;
|
||||
}
|
||||
mediaCodec.configure(
|
||||
format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
|
||||
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
|
||||
|
||||
if (useSurface) {
|
||||
eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
|
||||
@ -464,8 +446,7 @@ public class MediaCodecVideoEncoder {
|
||||
}
|
||||
|
||||
boolean encodeBuffer(
|
||||
boolean isKeyframe, int inputBuffer, int size,
|
||||
long presentationTimestampUs) {
|
||||
boolean isKeyframe, int inputBuffer, int size, long presentationTimestampUs) {
|
||||
checkOnMediaCodecThread();
|
||||
try {
|
||||
if (isKeyframe) {
|
||||
@ -478,11 +459,9 @@ public class MediaCodecVideoEncoder {
|
||||
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
|
||||
mediaCodec.setParameters(b);
|
||||
}
|
||||
mediaCodec.queueInputBuffer(
|
||||
inputBuffer, 0, size, presentationTimestampUs, 0);
|
||||
mediaCodec.queueInputBuffer(inputBuffer, 0, size, presentationTimestampUs, 0);
|
||||
return true;
|
||||
}
|
||||
catch (IllegalStateException e) {
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "encodeBuffer failed", e);
|
||||
return false;
|
||||
}
|
||||
@ -505,8 +484,7 @@ public class MediaCodecVideoEncoder {
|
||||
drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
|
||||
eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
|
||||
return true;
|
||||
}
|
||||
catch (RuntimeException e) {
|
||||
} catch (RuntimeException e) {
|
||||
Logging.e(TAG, "encodeTexture failed", e);
|
||||
return false;
|
||||
}
|
||||
@ -580,13 +558,13 @@ public class MediaCodecVideoEncoder {
|
||||
// Adjust actual encoder bitrate based on bitrate adjustment type.
|
||||
if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT && targetFps > 0) {
|
||||
codecBitrateBps = BITRATE_ADJUSTMENT_FPS * targetBitrateBps / targetFps;
|
||||
Logging.v(TAG, "setRates: " + kbps + " -> " + (codecBitrateBps / 1000)
|
||||
+ " kbps. Fps: " + targetFps);
|
||||
Logging.v(TAG,
|
||||
"setRates: " + kbps + " -> " + (codecBitrateBps / 1000) + " kbps. Fps: " + targetFps);
|
||||
} else if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
|
||||
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps
|
||||
+ ". ExpScale: " + bitrateAdjustmentScaleExp);
|
||||
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps + ". ExpScale: "
|
||||
+ bitrateAdjustmentScaleExp);
|
||||
if (bitrateAdjustmentScaleExp != 0) {
|
||||
codecBitrateBps = (int)(codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp));
|
||||
codecBitrateBps = (int) (codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp));
|
||||
}
|
||||
} else {
|
||||
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps);
|
||||
@ -618,8 +596,7 @@ public class MediaCodecVideoEncoder {
|
||||
// Helper struct for dequeueOutputBuffer() below.
|
||||
static class OutputBufferInfo {
|
||||
public OutputBufferInfo(
|
||||
int index, ByteBuffer buffer,
|
||||
boolean isKeyFrame, long presentationTimestampUs) {
|
||||
int index, ByteBuffer buffer, boolean isKeyFrame, long presentationTimestampUs) {
|
||||
this.index = index;
|
||||
this.buffer = buffer;
|
||||
this.isKeyFrame = isKeyFrame;
|
||||
@ -641,11 +618,9 @@ public class MediaCodecVideoEncoder {
|
||||
int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
|
||||
// Check if this is config frame and save configuration data.
|
||||
if (result >= 0) {
|
||||
boolean isConfigFrame =
|
||||
(info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
|
||||
boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
|
||||
if (isConfigFrame) {
|
||||
Logging.d(TAG, "Config frame generated. Offset: " + info.offset +
|
||||
". Size: " + info.size);
|
||||
Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
|
||||
configData = ByteBuffer.allocateDirect(info.size);
|
||||
outputBuffers[result].position(info.offset);
|
||||
outputBuffers[result].limit(info.offset + info.size);
|
||||
@ -666,27 +641,23 @@ public class MediaCodecVideoEncoder {
|
||||
reportEncodedFrame(info.size);
|
||||
|
||||
// Check key frame flag.
|
||||
boolean isKeyFrame =
|
||||
(info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
|
||||
boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
|
||||
if (isKeyFrame) {
|
||||
Logging.d(TAG, "Sync frame generated");
|
||||
}
|
||||
if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
|
||||
Logging.d(TAG, "Appending config frame of size " + configData.capacity() +
|
||||
" to output buffer with offset " + info.offset + ", size " +
|
||||
info.size);
|
||||
Logging.d(TAG, "Appending config frame of size " + configData.capacity()
|
||||
+ " to output buffer with offset " + info.offset + ", size " + info.size);
|
||||
// For H.264 key frame append SPS and PPS NALs at the start
|
||||
ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(
|
||||
configData.capacity() + info.size);
|
||||
ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size);
|
||||
configData.rewind();
|
||||
keyFrameBuffer.put(configData);
|
||||
keyFrameBuffer.put(outputBuffer);
|
||||
keyFrameBuffer.position(0);
|
||||
return new OutputBufferInfo(result, keyFrameBuffer,
|
||||
isKeyFrame, info.presentationTimeUs);
|
||||
return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs);
|
||||
} else {
|
||||
return new OutputBufferInfo(result, outputBuffer.slice(),
|
||||
isKeyFrame, info.presentationTimeUs);
|
||||
return new OutputBufferInfo(
|
||||
result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
|
||||
}
|
||||
} else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
|
||||
outputBuffers = mediaCodec.getOutputBuffers();
|
||||
@ -705,7 +676,7 @@ public class MediaCodecVideoEncoder {
|
||||
|
||||
private double getBitrateScale(int bitrateAdjustmentScaleExp) {
|
||||
return Math.pow(BITRATE_CORRECTION_MAX_SCALE,
|
||||
(double)bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
|
||||
(double) bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
|
||||
}
|
||||
|
||||
private void reportEncodedFrame(int size) {
|
||||
@ -727,8 +698,7 @@ public class MediaCodecVideoEncoder {
|
||||
// Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
|
||||
// form the target value.
|
||||
if (bitrateObservationTimeMs > 1000 * BITRATE_CORRECTION_SEC) {
|
||||
Logging.d(TAG, "Acc: " + (int)bitrateAccumulator
|
||||
+ ". Max: " + (int)bitrateAccumulatorMax
|
||||
Logging.d(TAG, "Acc: " + (int) bitrateAccumulator + ". Max: " + (int) bitrateAccumulatorMax
|
||||
+ ". ExpScale: " + bitrateAdjustmentScaleExp);
|
||||
boolean bitrateAdjustmentScaleChanged = false;
|
||||
if (bitrateAccumulator > bitrateAccumulatorMax) {
|
||||
@ -745,8 +715,8 @@ public class MediaCodecVideoEncoder {
|
||||
if (bitrateAdjustmentScaleChanged) {
|
||||
bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_CORRECTION_STEPS);
|
||||
bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_CORRECTION_STEPS);
|
||||
Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp
|
||||
+ ". Value: " + getBitrateScale(bitrateAdjustmentScaleExp));
|
||||
Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp + ". Value: "
|
||||
+ getBitrateScale(bitrateAdjustmentScaleExp));
|
||||
setRates(targetBitrateBps / 1000, targetFps);
|
||||
}
|
||||
bitrateObservationTimeMs = 0;
|
||||
|
||||
@ -48,7 +48,7 @@ public class MediaConstraints {
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
KeyValuePair that = (KeyValuePair)other;
|
||||
KeyValuePair that = (KeyValuePair) other;
|
||||
return key.equals(that.key) && value.equals(that.value);
|
||||
}
|
||||
|
||||
@ -78,7 +78,7 @@ public class MediaConstraints {
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "mandatory: " + stringifyKeyValuePairList(mandatory) +
|
||||
", optional: " + stringifyKeyValuePairList(optional);
|
||||
return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
|
||||
+ stringifyKeyValuePairList(optional);
|
||||
}
|
||||
}
|
||||
|
||||
@ -8,15 +8,12 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/** Java wrapper for a C++ MediaSourceInterface. */
|
||||
public class MediaSource {
|
||||
/** Tracks MediaSourceInterface.SourceState */
|
||||
public enum State {
|
||||
INITIALIZING, LIVE, ENDED, MUTED
|
||||
}
|
||||
public enum State { INITIALIZING, LIVE, ENDED, MUTED }
|
||||
|
||||
final long nativeSource; // Package-protected for PeerConnectionFactory.
|
||||
|
||||
|
||||
@ -89,21 +89,16 @@ public class MediaStream {
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "[" + label() + ":A=" + audioTracks.size() +
|
||||
":V=" + videoTracks.size() + "]";
|
||||
return "[" + label() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]";
|
||||
}
|
||||
|
||||
private static native boolean nativeAddAudioTrack(
|
||||
long nativeStream, long nativeAudioTrack);
|
||||
private static native boolean nativeAddAudioTrack(long nativeStream, long nativeAudioTrack);
|
||||
|
||||
private static native boolean nativeAddVideoTrack(
|
||||
long nativeStream, long nativeVideoTrack);
|
||||
private static native boolean nativeAddVideoTrack(long nativeStream, long nativeVideoTrack);
|
||||
|
||||
private static native boolean nativeRemoveAudioTrack(
|
||||
long nativeStream, long nativeAudioTrack);
|
||||
private static native boolean nativeRemoveAudioTrack(long nativeStream, long nativeAudioTrack);
|
||||
|
||||
private static native boolean nativeRemoveVideoTrack(
|
||||
long nativeStream, long nativeVideoTrack);
|
||||
private static native boolean nativeRemoveVideoTrack(long nativeStream, long nativeVideoTrack);
|
||||
|
||||
private static native String nativeLabel(long nativeStream);
|
||||
|
||||
|
||||
@ -51,8 +51,7 @@ public class MediaStreamTrack {
|
||||
|
||||
private static native boolean nativeEnabled(long nativeTrack);
|
||||
|
||||
private static native boolean nativeSetEnabled(
|
||||
long nativeTrack, boolean enabled);
|
||||
private static native boolean nativeSetEnabled(long nativeTrack, boolean enabled);
|
||||
|
||||
private static native State nativeState(long nativeTrack);
|
||||
|
||||
|
||||
@ -140,8 +140,7 @@ public class NetworkMonitor {
|
||||
return;
|
||||
}
|
||||
if (autoDetector == null) {
|
||||
autoDetector = new NetworkMonitorAutoDetect(
|
||||
new NetworkMonitorAutoDetect.Observer() {
|
||||
autoDetector = new NetworkMonitorAutoDetect(new NetworkMonitorAutoDetect.Observer() {
|
||||
|
||||
@Override
|
||||
public void onConnectionTypeChanged(ConnectionType newConnectionType) {
|
||||
@ -157,8 +156,7 @@ public class NetworkMonitor {
|
||||
public void onNetworkDisconnect(long networkHandle) {
|
||||
notifyObserversOfNetworkDisconnect(networkHandle);
|
||||
}
|
||||
},
|
||||
applicationContext);
|
||||
}, applicationContext);
|
||||
final NetworkMonitorAutoDetect.NetworkState networkState =
|
||||
autoDetector.getCurrentNetworkState();
|
||||
updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
|
||||
@ -241,8 +239,8 @@ public class NetworkMonitor {
|
||||
private native void nativeNotifyConnectionTypeChanged(long nativePtr);
|
||||
private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
|
||||
private native void nativeNotifyOfNetworkDisconnect(long nativePtr, long networkHandle);
|
||||
private native void nativeNotifyOfActiveNetworkList(long nativePtr,
|
||||
NetworkInformation[] networkInfos);
|
||||
private native void nativeNotifyOfActiveNetworkList(
|
||||
long nativePtr, NetworkInformation[] networkInfos);
|
||||
|
||||
// For testing only.
|
||||
static void resetInstanceForTests(Context context) {
|
||||
|
||||
@ -58,19 +58,19 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
|
||||
public static class IPAddress {
|
||||
public final byte[] address;
|
||||
public IPAddress (byte[] address) {
|
||||
public IPAddress(byte[] address) {
|
||||
this.address = address;
|
||||
}
|
||||
}
|
||||
|
||||
/** Java version of NetworkMonitor.NetworkInformation */
|
||||
public static class NetworkInformation{
|
||||
public static class NetworkInformation {
|
||||
public final String name;
|
||||
public final ConnectionType type;
|
||||
public final long handle;
|
||||
public final IPAddress[] ipAddresses;
|
||||
public NetworkInformation(String name, ConnectionType type, long handle,
|
||||
IPAddress[] addresses) {
|
||||
public NetworkInformation(
|
||||
String name, ConnectionType type, long handle, IPAddress[] addresses) {
|
||||
this.name = name;
|
||||
this.type = type;
|
||||
this.handle = handle;
|
||||
@ -112,7 +112,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
private class SimpleNetworkCallback extends NetworkCallback {
|
||||
|
||||
@Override
|
||||
public void onAvailable(Network network) {
|
||||
Logging.d(TAG, "Network becomes available: " + network.toString());
|
||||
@ -120,8 +119,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCapabilitiesChanged(
|
||||
Network network, NetworkCapabilities networkCapabilities) {
|
||||
public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) {
|
||||
// A capabilities change may indicate the ConnectionType has changed,
|
||||
// so forward the new NetworkInformation along to the observer.
|
||||
Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
|
||||
@ -140,8 +138,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
public void onLosing(Network network, int maxMsToLive) {
|
||||
// Tell the network is going to lose in MaxMsToLive milliseconds.
|
||||
// We may use this signal later.
|
||||
Logging.d(TAG,
|
||||
"Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
|
||||
Logging.d(
|
||||
TAG, "Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -303,15 +301,13 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
if (connectionType == ConnectionType.CONNECTION_UNKNOWN
|
||||
|| connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
|
||||
Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
|
||||
+ " because it has type " + networkState.getNetworkType()
|
||||
+ " and subtype " + networkState.getNetworkSubType());
|
||||
+ " because it has type " + networkState.getNetworkType() + " and subtype "
|
||||
+ networkState.getNetworkSubType());
|
||||
}
|
||||
|
||||
NetworkInformation networkInformation = new NetworkInformation(
|
||||
linkProperties.getInterfaceName(),
|
||||
connectionType,
|
||||
networkToNetId(network),
|
||||
getIPAddresses(linkProperties));
|
||||
NetworkInformation networkInformation =
|
||||
new NetworkInformation(linkProperties.getInterfaceName(), connectionType,
|
||||
networkToNetId(network), getIPAddresses(linkProperties));
|
||||
return networkInformation;
|
||||
}
|
||||
|
||||
@ -324,8 +320,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
if (connectivityManager == null) {
|
||||
return false;
|
||||
}
|
||||
final NetworkCapabilities capabilities =
|
||||
connectivityManager.getNetworkCapabilities(network);
|
||||
final NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network);
|
||||
return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
|
||||
}
|
||||
|
||||
@ -369,7 +364,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/** Queries the WifiManager for SSID of the current Wifi connection. */
|
||||
static class WifiManagerDelegate {
|
||||
private final Context context;
|
||||
@ -384,8 +378,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
}
|
||||
|
||||
String getWifiSSID() {
|
||||
final Intent intent = context.registerReceiver(null,
|
||||
new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
|
||||
final Intent intent = context.registerReceiver(
|
||||
null, new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
|
||||
if (intent != null) {
|
||||
final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
|
||||
if (wifiInfo != null) {
|
||||
@ -397,7 +391,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static final long INVALID_NET_ID = -1;
|
||||
@ -507,7 +500,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
* Registers a BroadcastReceiver in the given context.
|
||||
*/
|
||||
private void registerReceiver() {
|
||||
if (isRegistered) return;
|
||||
if (isRegistered)
|
||||
return;
|
||||
|
||||
isRegistered = true;
|
||||
context.registerReceiver(this, intentFilter);
|
||||
@ -517,7 +511,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
* Unregisters the BroadcastReceiver in the given context.
|
||||
*/
|
||||
private void unregisterReceiver() {
|
||||
if (!isRegistered) return;
|
||||
if (!isRegistered)
|
||||
return;
|
||||
|
||||
isRegistered = false;
|
||||
context.unregisterReceiver(this);
|
||||
@ -581,7 +576,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
}
|
||||
|
||||
private String getWifiSSID(NetworkState networkState) {
|
||||
if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI) return "";
|
||||
if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI)
|
||||
return "";
|
||||
return wifiManagerDelegate.getWifiSSID();
|
||||
}
|
||||
|
||||
@ -597,7 +593,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
private void connectionTypeChanged(NetworkState networkState) {
|
||||
ConnectionType newConnectionType = getConnectionType(networkState);
|
||||
String newWifiSSID = getWifiSSID(networkState);
|
||||
if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return;
|
||||
if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID))
|
||||
return;
|
||||
|
||||
connectionType = newConnectionType;
|
||||
wifiSSID = newWifiSSID;
|
||||
|
||||
@ -8,7 +8,6 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.Collections;
|
||||
@ -27,19 +26,28 @@ public class PeerConnection {
|
||||
}
|
||||
|
||||
/** Tracks PeerConnectionInterface::IceGatheringState */
|
||||
public enum IceGatheringState { NEW, GATHERING, COMPLETE };
|
||||
|
||||
public enum IceGatheringState { NEW, GATHERING, COMPLETE }
|
||||
|
||||
/** Tracks PeerConnectionInterface::IceConnectionState */
|
||||
public enum IceConnectionState {
|
||||
NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED
|
||||
};
|
||||
NEW,
|
||||
CHECKING,
|
||||
CONNECTED,
|
||||
COMPLETED,
|
||||
FAILED,
|
||||
DISCONNECTED,
|
||||
CLOSED
|
||||
}
|
||||
|
||||
/** Tracks PeerConnectionInterface::SignalingState */
|
||||
public enum SignalingState {
|
||||
STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER,
|
||||
HAVE_REMOTE_PRANSWER, CLOSED
|
||||
};
|
||||
STABLE,
|
||||
HAVE_LOCAL_OFFER,
|
||||
HAVE_LOCAL_PRANSWER,
|
||||
HAVE_REMOTE_OFFER,
|
||||
HAVE_REMOTE_PRANSWER,
|
||||
CLOSED
|
||||
}
|
||||
|
||||
/** Java version of PeerConnectionObserver. */
|
||||
public static interface Observer {
|
||||
@ -97,39 +105,25 @@ public class PeerConnection {
|
||||
}
|
||||
|
||||
/** Java version of PeerConnectionInterface.IceTransportsType */
|
||||
public enum IceTransportsType {
|
||||
NONE, RELAY, NOHOST, ALL
|
||||
};
|
||||
public enum IceTransportsType { NONE, RELAY, NOHOST, ALL }
|
||||
|
||||
/** Java version of PeerConnectionInterface.BundlePolicy */
|
||||
public enum BundlePolicy {
|
||||
BALANCED, MAXBUNDLE, MAXCOMPAT
|
||||
};
|
||||
public enum BundlePolicy { BALANCED, MAXBUNDLE, MAXCOMPAT }
|
||||
|
||||
/** Java version of PeerConnectionInterface.RtcpMuxPolicy */
|
||||
public enum RtcpMuxPolicy {
|
||||
NEGOTIATE, REQUIRE
|
||||
};
|
||||
public enum RtcpMuxPolicy { NEGOTIATE, REQUIRE }
|
||||
|
||||
/** Java version of PeerConnectionInterface.TcpCandidatePolicy */
|
||||
public enum TcpCandidatePolicy {
|
||||
ENABLED, DISABLED
|
||||
};
|
||||
public enum TcpCandidatePolicy { ENABLED, DISABLED }
|
||||
|
||||
/** Java version of PeerConnectionInterface.CandidateNetworkPolicy */
|
||||
public enum CandidateNetworkPolicy {
|
||||
ALL, LOW_COST
|
||||
};
|
||||
public enum CandidateNetworkPolicy { ALL, LOW_COST }
|
||||
|
||||
/** Java version of rtc::KeyType */
|
||||
public enum KeyType {
|
||||
RSA, ECDSA
|
||||
}
|
||||
public enum KeyType { RSA, ECDSA }
|
||||
|
||||
/** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
|
||||
public enum ContinualGatheringPolicy {
|
||||
GATHER_ONCE, GATHER_CONTINUALLY
|
||||
}
|
||||
public enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY }
|
||||
|
||||
/** Java version of PeerConnectionInterface.RTCConfiguration */
|
||||
public static class RTCConfiguration {
|
||||
@ -187,26 +181,20 @@ public class PeerConnection {
|
||||
|
||||
public native SessionDescription getRemoteDescription();
|
||||
|
||||
public native DataChannel createDataChannel(
|
||||
String label, DataChannel.Init init);
|
||||
public native DataChannel createDataChannel(String label, DataChannel.Init init);
|
||||
|
||||
public native void createOffer(
|
||||
SdpObserver observer, MediaConstraints constraints);
|
||||
public native void createOffer(SdpObserver observer, MediaConstraints constraints);
|
||||
|
||||
public native void createAnswer(
|
||||
SdpObserver observer, MediaConstraints constraints);
|
||||
public native void createAnswer(SdpObserver observer, MediaConstraints constraints);
|
||||
|
||||
public native void setLocalDescription(
|
||||
SdpObserver observer, SessionDescription sdp);
|
||||
public native void setLocalDescription(SdpObserver observer, SessionDescription sdp);
|
||||
|
||||
public native void setRemoteDescription(
|
||||
SdpObserver observer, SessionDescription sdp);
|
||||
public native void setRemoteDescription(SdpObserver observer, SessionDescription sdp);
|
||||
|
||||
public native boolean setConfiguration(RTCConfiguration config);
|
||||
|
||||
public boolean addIceCandidate(IceCandidate candidate) {
|
||||
return nativeAddIceCandidate(
|
||||
candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
|
||||
return nativeAddIceCandidate(candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
|
||||
}
|
||||
|
||||
public boolean removeIceCandidates(final IceCandidate[] candidates) {
|
||||
@ -314,8 +302,7 @@ public class PeerConnection {
|
||||
|
||||
private native void nativeRemoveLocalStream(long nativeStream);
|
||||
|
||||
private native boolean nativeGetStats(
|
||||
StatsObserver observer, long nativeTrack);
|
||||
private native boolean nativeGetStats(StatsObserver observer, long nativeTrack);
|
||||
|
||||
private native RtpSender nativeCreateSender(String kind, String stream_id);
|
||||
|
||||
@ -323,9 +310,7 @@ public class PeerConnection {
|
||||
|
||||
private native List<RtpReceiver> nativeGetReceivers();
|
||||
|
||||
private native boolean nativeStartRtcEventLog(
|
||||
int file_descriptor, int max_size_bytes);
|
||||
private native boolean nativeStartRtcEventLog(int file_descriptor, int max_size_bytes);
|
||||
|
||||
private native void nativeStopRtcEventLog();
|
||||
|
||||
}
|
||||
|
||||
@ -8,7 +8,6 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.List;
|
||||
@ -52,9 +51,8 @@ public class PeerConnectionFactory {
|
||||
// |renderEGLContext| can be provided to suport HW video decoding to
|
||||
// texture and will be used to create a shared EGL context on video
|
||||
// decoding thread.
|
||||
public static native boolean initializeAndroidGlobals(
|
||||
Object context, boolean initializeAudio, boolean initializeVideo,
|
||||
boolean videoHwAcceleration);
|
||||
public static native boolean initializeAndroidGlobals(Object context, boolean initializeAudio,
|
||||
boolean initializeVideo, boolean videoHwAcceleration);
|
||||
|
||||
// Field trial initialization. Must be called before PeerConnectionFactory
|
||||
// is created.
|
||||
@ -81,51 +79,44 @@ public class PeerConnectionFactory {
|
||||
}
|
||||
}
|
||||
|
||||
public PeerConnection createPeerConnection(
|
||||
PeerConnection.RTCConfiguration rtcConfig,
|
||||
MediaConstraints constraints,
|
||||
PeerConnection.Observer observer) {
|
||||
public PeerConnection createPeerConnection(PeerConnection.RTCConfiguration rtcConfig,
|
||||
MediaConstraints constraints, PeerConnection.Observer observer) {
|
||||
long nativeObserver = nativeCreateObserver(observer);
|
||||
if (nativeObserver == 0) {
|
||||
return null;
|
||||
}
|
||||
long nativePeerConnection = nativeCreatePeerConnection(
|
||||
nativeFactory, rtcConfig, constraints, nativeObserver);
|
||||
long nativePeerConnection =
|
||||
nativeCreatePeerConnection(nativeFactory, rtcConfig, constraints, nativeObserver);
|
||||
if (nativePeerConnection == 0) {
|
||||
return null;
|
||||
}
|
||||
return new PeerConnection(nativePeerConnection, nativeObserver);
|
||||
}
|
||||
|
||||
public PeerConnection createPeerConnection(
|
||||
List<PeerConnection.IceServer> iceServers,
|
||||
MediaConstraints constraints,
|
||||
PeerConnection.Observer observer) {
|
||||
PeerConnection.RTCConfiguration rtcConfig =
|
||||
new PeerConnection.RTCConfiguration(iceServers);
|
||||
public PeerConnection createPeerConnection(List<PeerConnection.IceServer> iceServers,
|
||||
MediaConstraints constraints, PeerConnection.Observer observer) {
|
||||
PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
|
||||
return createPeerConnection(rtcConfig, constraints, observer);
|
||||
}
|
||||
|
||||
public MediaStream createLocalMediaStream(String label) {
|
||||
return new MediaStream(
|
||||
nativeCreateLocalMediaStream(nativeFactory, label));
|
||||
return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
|
||||
}
|
||||
|
||||
public VideoSource createVideoSource(VideoCapturer capturer) {
|
||||
final EglBase.Context eglContext =
|
||||
localEglbase == null ? null : localEglbase.getEglBaseContext();
|
||||
long nativeAndroidVideoTrackSource = nativeCreateVideoSource(
|
||||
nativeFactory, eglContext, capturer.isScreencast());
|
||||
VideoCapturer.CapturerObserver capturerObserver
|
||||
= new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
|
||||
nativeInitializeVideoCapturer(nativeFactory, capturer, nativeAndroidVideoTrackSource,
|
||||
capturerObserver);
|
||||
long nativeAndroidVideoTrackSource =
|
||||
nativeCreateVideoSource(nativeFactory, eglContext, capturer.isScreencast());
|
||||
VideoCapturer.CapturerObserver capturerObserver =
|
||||
new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
|
||||
nativeInitializeVideoCapturer(
|
||||
nativeFactory, capturer, nativeAndroidVideoTrackSource, capturerObserver);
|
||||
return new VideoSource(nativeAndroidVideoTrackSource);
|
||||
}
|
||||
|
||||
public VideoTrack createVideoTrack(String id, VideoSource source) {
|
||||
return new VideoTrack(nativeCreateVideoTrack(
|
||||
nativeFactory, id, source.nativeSource));
|
||||
return new VideoTrack(nativeCreateVideoTrack(nativeFactory, id, source.nativeSource));
|
||||
}
|
||||
|
||||
public AudioSource createAudioSource(MediaConstraints constraints) {
|
||||
@ -133,8 +124,7 @@ public class PeerConnectionFactory {
|
||||
}
|
||||
|
||||
public AudioTrack createAudioTrack(String id, AudioSource source) {
|
||||
return new AudioTrack(nativeCreateAudioTrack(
|
||||
nativeFactory, id, source.nativeSource));
|
||||
return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.nativeSource));
|
||||
}
|
||||
|
||||
// Starts recording an AEC dump. Ownership of the file is transfered to the
|
||||
@ -161,8 +151,8 @@ public class PeerConnectionFactory {
|
||||
* renderer.
|
||||
* @param remoteEglContext Must be the same as used by any remote video renderer.
|
||||
*/
|
||||
public void setVideoHwAccelerationOptions(EglBase.Context localEglContext,
|
||||
EglBase.Context remoteEglContext) {
|
||||
public void setVideoHwAccelerationOptions(
|
||||
EglBase.Context localEglContext, EglBase.Context remoteEglContext) {
|
||||
if (localEglbase != null) {
|
||||
Logging.w(TAG, "Egl context already set.");
|
||||
localEglbase.release();
|
||||
@ -173,8 +163,8 @@ public class PeerConnectionFactory {
|
||||
}
|
||||
localEglbase = EglBase.create(localEglContext);
|
||||
remoteEglbase = EglBase.create(remoteEglContext);
|
||||
nativeSetVideoHwAccelerationOptions(nativeFactory, localEglbase.getEglBaseContext(),
|
||||
remoteEglbase.getEglBaseContext());
|
||||
nativeSetVideoHwAccelerationOptions(
|
||||
nativeFactory, localEglbase.getEglBaseContext(), remoteEglbase.getEglBaseContext());
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
@ -227,21 +217,18 @@ public class PeerConnectionFactory {
|
||||
|
||||
private static native long nativeCreatePeerConnectionFactory(Options options);
|
||||
|
||||
private static native long nativeCreateObserver(
|
||||
PeerConnection.Observer observer);
|
||||
private static native long nativeCreateObserver(PeerConnection.Observer observer);
|
||||
|
||||
private static native long nativeCreatePeerConnection(
|
||||
long nativeFactory, PeerConnection.RTCConfiguration rtcConfig,
|
||||
MediaConstraints constraints, long nativeObserver);
|
||||
private static native long nativeCreatePeerConnection(long nativeFactory,
|
||||
PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver);
|
||||
|
||||
private static native long nativeCreateLocalMediaStream(
|
||||
long nativeFactory, String label);
|
||||
private static native long nativeCreateLocalMediaStream(long nativeFactory, String label);
|
||||
|
||||
private static native long nativeCreateVideoSource(
|
||||
long nativeFactory, EglBase.Context eglContext, boolean is_screencast);
|
||||
|
||||
private static native void nativeInitializeVideoCapturer(
|
||||
long native_factory, VideoCapturer j_video_capturer, long native_source,
|
||||
private static native void nativeInitializeVideoCapturer(long native_factory,
|
||||
VideoCapturer j_video_capturer, long native_source,
|
||||
VideoCapturer.CapturerObserver j_frame_observer);
|
||||
|
||||
private static native long nativeCreateVideoTrack(
|
||||
@ -258,8 +245,7 @@ public class PeerConnectionFactory {
|
||||
|
||||
private static native void nativeStopAecDump(long nativeFactory);
|
||||
|
||||
@Deprecated
|
||||
public native void nativeSetOptions(long nativeFactory, Options options);
|
||||
@Deprecated public native void nativeSetOptions(long nativeFactory, Options options);
|
||||
|
||||
private static native void nativeSetVideoHwAccelerationOptions(
|
||||
long nativeFactory, Object localEGLContext, Object remoteEGLContext);
|
||||
|
||||
@ -42,8 +42,8 @@ public class RendererCommon {
|
||||
*/
|
||||
void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
|
||||
void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
|
||||
void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
|
||||
int viewportY, int viewportWidth, int viewportHeight);
|
||||
void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
|
||||
|
||||
@ -116,6 +116,7 @@ public class RendererCommon {
|
||||
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
|
||||
// This limits excessive cropping when adjusting display size.
|
||||
private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
|
||||
// clang-format off
|
||||
public static final float[] identityMatrix() {
|
||||
return new float[] {
|
||||
1, 0, 0, 0,
|
||||
@ -140,6 +141,7 @@ public class RendererCommon {
|
||||
0, 0, 1, 0,
|
||||
1, 0, 0, 1};
|
||||
}
|
||||
// clang-format on
|
||||
|
||||
/**
|
||||
* Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
|
||||
@ -189,8 +191,8 @@ public class RendererCommon {
|
||||
/**
|
||||
* Calculate display size based on scaling type, video aspect ratio, and maximum display size.
|
||||
*/
|
||||
public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
|
||||
int maxDisplayWidth, int maxDisplayHeight) {
|
||||
public static Point getDisplaySize(
|
||||
ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
|
||||
return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
|
||||
maxDisplayWidth, maxDisplayHeight);
|
||||
}
|
||||
@ -230,17 +232,17 @@ public class RendererCommon {
|
||||
* Calculate display size based on minimum fraction of the video that must remain visible,
|
||||
* video aspect ratio, and maximum display size.
|
||||
*/
|
||||
private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
|
||||
int maxDisplayWidth, int maxDisplayHeight) {
|
||||
private static Point getDisplaySize(
|
||||
float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
|
||||
// If there is no constraint on the amount of cropping, fill the allowed display area.
|
||||
if (minVisibleFraction == 0 || videoAspectRatio == 0) {
|
||||
return new Point(maxDisplayWidth, maxDisplayHeight);
|
||||
}
|
||||
// Each dimension is constrained on max display size and how much we are allowed to crop.
|
||||
final int width = Math.min(maxDisplayWidth,
|
||||
Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
|
||||
final int height = Math.min(maxDisplayHeight,
|
||||
Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
|
||||
final int width = Math.min(
|
||||
maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
|
||||
final int height = Math.min(
|
||||
maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
|
||||
return new Point(width, height);
|
||||
}
|
||||
}
|
||||
|
||||
@ -48,8 +48,8 @@ public class RtpReceiver {
|
||||
// Will be released in dispose().
|
||||
private static native long nativeGetTrack(long nativeRtpReceiver);
|
||||
|
||||
private static native boolean nativeSetParameters(long nativeRtpReceiver,
|
||||
RtpParameters parameters);
|
||||
private static native boolean nativeSetParameters(
|
||||
long nativeRtpReceiver, RtpParameters parameters);
|
||||
|
||||
private static native RtpParameters nativeGetParameters(long nativeRtpReceiver);
|
||||
|
||||
|
||||
@ -30,8 +30,7 @@ public class RtpSender {
|
||||
// not appropriate when the track is owned by, for example, another RtpSender
|
||||
// or a MediaStream.
|
||||
public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
|
||||
if (!nativeSetTrack(nativeRtpSender,
|
||||
(track == null) ? 0 : track.nativeTrack)) {
|
||||
if (!nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.nativeTrack)) {
|
||||
return false;
|
||||
}
|
||||
if (cachedTrack != null && ownsTrack) {
|
||||
@ -65,20 +64,17 @@ public class RtpSender {
|
||||
free(nativeRtpSender);
|
||||
}
|
||||
|
||||
private static native boolean nativeSetTrack(long nativeRtpSender,
|
||||
long nativeTrack);
|
||||
private static native boolean nativeSetTrack(long nativeRtpSender, long nativeTrack);
|
||||
|
||||
// This should increment the reference count of the track.
|
||||
// Will be released in dispose() or setTrack().
|
||||
private static native long nativeGetTrack(long nativeRtpSender);
|
||||
|
||||
private static native boolean nativeSetParameters(long nativeRtpSender,
|
||||
RtpParameters parameters);
|
||||
private static native boolean nativeSetParameters(long nativeRtpSender, RtpParameters parameters);
|
||||
|
||||
private static native RtpParameters nativeGetParameters(long nativeRtpSender);
|
||||
|
||||
private static native String nativeId(long nativeRtpSender);
|
||||
|
||||
private static native void free(long nativeRtpSender);
|
||||
}
|
||||
;
|
||||
};
|
||||
|
||||
@ -35,11 +35,10 @@ import java.util.List;
|
||||
* frames. At any time, at most one frame is being processed.
|
||||
*/
|
||||
@TargetApi(21)
|
||||
public class ScreenCapturerAndroid implements
|
||||
VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
|
||||
|
||||
private static final int DISPLAY_FLAGS = DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
|
||||
| DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
|
||||
public class ScreenCapturerAndroid
|
||||
implements VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
|
||||
private static final int DISPLAY_FLAGS =
|
||||
DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
|
||||
// DPI for VirtualDisplay, does not seem to matter for us.
|
||||
private static final int VIRTUAL_DISPLAY_DPI = 400;
|
||||
|
||||
@ -65,8 +64,7 @@ public class ScreenCapturerAndroid implements
|
||||
* @param mediaProjectionCallback MediaProjection callback to implement application specific
|
||||
* logic in events such as when the user revokes a previously granted capture permission.
|
||||
**/
|
||||
public ScreenCapturerAndroid(
|
||||
Intent mediaProjectionPermissionResultData,
|
||||
public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData,
|
||||
MediaProjection.Callback mediaProjectionCallback) {
|
||||
this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
|
||||
this.mediaProjectionCallback = mediaProjectionCallback;
|
||||
@ -79,10 +77,8 @@ public class ScreenCapturerAndroid implements
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void initialize(
|
||||
final SurfaceTextureHelper surfaceTextureHelper,
|
||||
final Context applicationContext,
|
||||
final VideoCapturer.CapturerObserver capturerObserver) {
|
||||
public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
|
||||
final Context applicationContext, final VideoCapturer.CapturerObserver capturerObserver) {
|
||||
checkNotDisposed();
|
||||
|
||||
if (capturerObserver == null) {
|
||||
@ -95,13 +91,13 @@ public class ScreenCapturerAndroid implements
|
||||
}
|
||||
this.surfaceTextureHelper = surfaceTextureHelper;
|
||||
|
||||
mediaProjectionManager = (MediaProjectionManager)
|
||||
applicationContext.getSystemService(Context.MEDIA_PROJECTION_SERVICE);
|
||||
mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
|
||||
Context.MEDIA_PROJECTION_SERVICE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void startCapture(final int width, final int height,
|
||||
final int ignoredFramerate) {
|
||||
public synchronized void startCapture(
|
||||
final int width, final int height, final int ignoredFramerate) {
|
||||
checkNotDisposed();
|
||||
|
||||
this.width = width;
|
||||
@ -143,7 +139,6 @@ public class ScreenCapturerAndroid implements
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public synchronized void dispose() {
|
||||
isDisposed = true;
|
||||
@ -184,9 +179,8 @@ public class ScreenCapturerAndroid implements
|
||||
|
||||
private void createVirtualDisplay() {
|
||||
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
|
||||
virtualDisplay = mediaProjection.createVirtualDisplay(
|
||||
"WebRTC_ScreenCapture", width, height, VIRTUAL_DISPLAY_DPI,
|
||||
DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
|
||||
virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
|
||||
VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
|
||||
null /* callback */, null /* callback handler */);
|
||||
}
|
||||
|
||||
@ -194,8 +188,8 @@ public class ScreenCapturerAndroid implements
|
||||
@Override
|
||||
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
|
||||
numCapturedFrames++;
|
||||
capturerObserver.onTextureFrameCaptured(width, height, oesTextureId, transformMatrix,
|
||||
0 /* rotation */, timestampNs);
|
||||
capturerObserver.onTextureFrameCaptured(
|
||||
width, height, oesTextureId, transformMatrix, 0 /* rotation */, timestampNs);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -207,4 +201,3 @@ public class ScreenCapturerAndroid implements
|
||||
return numCapturedFrames;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -8,7 +8,6 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/**
|
||||
@ -19,7 +18,9 @@ package org.webrtc;
|
||||
public class SessionDescription {
|
||||
/** Java-land enum version of SessionDescriptionInterface's type() string. */
|
||||
public static enum Type {
|
||||
OFFER, PRANSWER, ANSWER;
|
||||
OFFER,
|
||||
PRANSWER,
|
||||
ANSWER;
|
||||
|
||||
public String canonicalForm() {
|
||||
return name().toLowerCase();
|
||||
|
||||
@ -12,7 +12,6 @@ package org.webrtc;
|
||||
|
||||
/** Java version of webrtc::StatsReport. */
|
||||
public class StatsReport {
|
||||
|
||||
/** Java version of webrtc::StatsReport::Value. */
|
||||
public static class Value {
|
||||
public final String name;
|
||||
@ -45,8 +44,13 @@ public class StatsReport {
|
||||
|
||||
public String toString() {
|
||||
StringBuilder builder = new StringBuilder();
|
||||
builder.append("id: ").append(id).append(", type: ").append(type)
|
||||
.append(", timestamp: ").append(timestamp).append(", values: ");
|
||||
builder.append("id: ")
|
||||
.append(id)
|
||||
.append(", type: ")
|
||||
.append(type)
|
||||
.append(", timestamp: ")
|
||||
.append(timestamp)
|
||||
.append(", values: ");
|
||||
for (int i = 0; i < values.length; ++i) {
|
||||
builder.append(values[i].toString()).append(", ");
|
||||
}
|
||||
|
||||
@ -83,8 +83,7 @@ class SurfaceTextureHelper {
|
||||
|
||||
// Vertex coordinates in Normalized Device Coordinates, i.e.
|
||||
// (-1, -1) is bottom-left and (1, 1) is top-right.
|
||||
private static final FloatBuffer DEVICE_RECTANGLE =
|
||||
GlUtil.createFloatBuffer(new float[] {
|
||||
private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
|
||||
-1.0f, -1.0f, // Bottom left.
|
||||
1.0f, -1.0f, // Bottom right.
|
||||
-1.0f, 1.0f, // Top left.
|
||||
@ -92,14 +91,14 @@ class SurfaceTextureHelper {
|
||||
});
|
||||
|
||||
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
|
||||
private static final FloatBuffer TEXTURE_RECTANGLE =
|
||||
GlUtil.createFloatBuffer(new float[] {
|
||||
private static final FloatBuffer TEXTURE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
|
||||
0.0f, 0.0f, // Bottom left.
|
||||
1.0f, 0.0f, // Bottom right.
|
||||
0.0f, 1.0f, // Top left.
|
||||
1.0f, 1.0f // Top right.
|
||||
});
|
||||
|
||||
// clang-format off
|
||||
private static final String VERTEX_SHADER =
|
||||
"varying vec2 interp_tc;\n"
|
||||
+ "attribute vec4 in_pos;\n"
|
||||
@ -140,12 +139,13 @@ class SurfaceTextureHelper {
|
||||
+ " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
|
||||
+ " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
|
||||
+ "}\n";
|
||||
// clang-format on
|
||||
|
||||
private int texMatrixLoc;
|
||||
private int xUnitLoc;
|
||||
private int coeffsLoc;;
|
||||
private int coeffsLoc;
|
||||
|
||||
YuvConverter (EglBase.Context sharedContext) {
|
||||
YuvConverter(EglBase.Context sharedContext) {
|
||||
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
|
||||
eglBase.createDummyPbufferSurface();
|
||||
eglBase.makeCurrent();
|
||||
@ -165,11 +165,10 @@ class SurfaceTextureHelper {
|
||||
eglBase.detachCurrent();
|
||||
}
|
||||
|
||||
synchronized void convert(ByteBuffer buf,
|
||||
int width, int height, int stride, int textureId, float [] transformMatrix) {
|
||||
synchronized void convert(
|
||||
ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) {
|
||||
if (released) {
|
||||
throw new IllegalStateException(
|
||||
"YuvConverter.convert called on released object");
|
||||
throw new IllegalStateException("YuvConverter.convert called on released object");
|
||||
}
|
||||
|
||||
// We draw into a buffer laid out like
|
||||
@ -202,17 +201,15 @@ class SurfaceTextureHelper {
|
||||
// has to be a multiple of 8 pixels.
|
||||
|
||||
if (stride % 8 != 0) {
|
||||
throw new IllegalArgumentException(
|
||||
"Invalid stride, must be a multiple of 8");
|
||||
throw new IllegalArgumentException("Invalid stride, must be a multiple of 8");
|
||||
}
|
||||
if (stride < width){
|
||||
throw new IllegalArgumentException(
|
||||
"Invalid stride, must >= width");
|
||||
if (stride < width) {
|
||||
throw new IllegalArgumentException("Invalid stride, must >= width");
|
||||
}
|
||||
|
||||
int y_width = (width+3) / 4;
|
||||
int uv_width = (width+7) / 8;
|
||||
int uv_height = (height+1)/2;
|
||||
int y_width = (width + 3) / 4;
|
||||
int uv_width = (width + 7) / 8;
|
||||
int uv_height = (height + 1) / 2;
|
||||
int total_height = height + uv_height;
|
||||
int size = stride * total_height;
|
||||
|
||||
@ -222,18 +219,16 @@ class SurfaceTextureHelper {
|
||||
// Produce a frame buffer starting at top-left corner, not
|
||||
// bottom-left.
|
||||
transformMatrix =
|
||||
RendererCommon.multiplyMatrices(transformMatrix,
|
||||
RendererCommon.verticalFlipMatrix());
|
||||
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix());
|
||||
|
||||
// Create new pBuffferSurface with the correct size if needed.
|
||||
if (eglBase.hasSurface()) {
|
||||
if (eglBase.surfaceWidth() != stride/4 ||
|
||||
eglBase.surfaceHeight() != total_height){
|
||||
if (eglBase.surfaceWidth() != stride / 4 || eglBase.surfaceHeight() != total_height) {
|
||||
eglBase.releaseSurface();
|
||||
eglBase.createPbufferSurface(stride/4, total_height);
|
||||
eglBase.createPbufferSurface(stride / 4, total_height);
|
||||
}
|
||||
} else {
|
||||
eglBase.createPbufferSurface(stride/4, total_height);
|
||||
eglBase.createPbufferSurface(stride / 4, total_height);
|
||||
}
|
||||
|
||||
eglBase.makeCurrent();
|
||||
@ -245,9 +240,7 @@ class SurfaceTextureHelper {
|
||||
// Draw Y
|
||||
GLES20.glViewport(0, 0, y_width, height);
|
||||
// Matrix * (1;0;0;0) / width. Note that opengl uses column major order.
|
||||
GLES20.glUniform2f(xUnitLoc,
|
||||
transformMatrix[0] / width,
|
||||
transformMatrix[1] / width);
|
||||
GLES20.glUniform2f(xUnitLoc, transformMatrix[0] / width, transformMatrix[1] / width);
|
||||
// Y'UV444 to RGB888, see
|
||||
// https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
|
||||
// We use the ITU-R coefficients for U and V */
|
||||
@ -257,19 +250,18 @@ class SurfaceTextureHelper {
|
||||
// Draw U
|
||||
GLES20.glViewport(0, height, uv_width, uv_height);
|
||||
// Matrix * (1;0;0;0) / (width / 2). Note that opengl uses column major order.
|
||||
GLES20.glUniform2f(xUnitLoc,
|
||||
2.0f * transformMatrix[0] / width,
|
||||
2.0f * transformMatrix[1] / width);
|
||||
GLES20.glUniform2f(
|
||||
xUnitLoc, 2.0f * transformMatrix[0] / width, 2.0f * transformMatrix[1] / width);
|
||||
GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
|
||||
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
||||
|
||||
// Draw V
|
||||
GLES20.glViewport(stride/8, height, uv_width, uv_height);
|
||||
GLES20.glViewport(stride / 8, height, uv_width, uv_height);
|
||||
GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
|
||||
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
||||
|
||||
GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
|
||||
GLES20.GL_UNSIGNED_BYTE, buf);
|
||||
GLES20.glReadPixels(
|
||||
0, 0, stride / 4, total_height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
|
||||
|
||||
GlUtil.checkNoGLES2Error("YuvConverter.convert");
|
||||
|
||||
@ -351,7 +343,7 @@ class SurfaceTextureHelper {
|
||||
if (yuvConverter != null)
|
||||
return yuvConverter;
|
||||
|
||||
synchronized(this) {
|
||||
synchronized (this) {
|
||||
if (yuvConverter == null)
|
||||
yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
|
||||
return yuvConverter;
|
||||
@ -409,7 +401,8 @@ class SurfaceTextureHelper {
|
||||
*/
|
||||
public void returnTextureFrame() {
|
||||
handler.post(new Runnable() {
|
||||
@Override public void run() {
|
||||
@Override
|
||||
public void run() {
|
||||
isTextureInUse = false;
|
||||
if (isQuitting) {
|
||||
release();
|
||||
@ -442,8 +435,8 @@ class SurfaceTextureHelper {
|
||||
});
|
||||
}
|
||||
|
||||
public void textureToYUV(ByteBuffer buf,
|
||||
int width, int height, int stride, int textureId, float [] transformMatrix) {
|
||||
public void textureToYUV(
|
||||
ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) {
|
||||
if (textureId != oesTextureId)
|
||||
throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
|
||||
|
||||
|
||||
@ -33,8 +33,8 @@ import javax.microedition.khronos.egl.EGLContext;
* Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
* Interaction with the layout framework in onMeasure and onSizeChanged.
*/
public class SurfaceViewRenderer extends SurfaceView
implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
public class SurfaceViewRenderer
extends SurfaceView implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
private static final String TAG = "SurfaceViewRenderer";

// Dedicated render thread.
@ -103,13 +103,15 @@ public class SurfaceViewRenderer extends SurfaceView

// Runnable for posting frames to render thread.
private final Runnable renderFrameRunnable = new Runnable() {
@Override public void run() {
@Override
public void run() {
renderFrameOnRenderThread();
}
};
// Runnable for clearing Surface to black.
private final Runnable makeBlackRunnable = new Runnable() {
@Override public void run() {
@Override
public void run() {
makeBlack();
}
};
@ -134,8 +136,7 @@ public class SurfaceViewRenderer extends SurfaceView
* Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
* reinitialize the renderer after a previous init()/release() cycle.
*/
public void init(
EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
}

@ -145,9 +146,9 @@ public class SurfaceViewRenderer extends SurfaceView
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
* init()/release() cycle.
*/
public void init(
final EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
final int[] configAttributes, RendererCommon.GlDrawer drawer) {
public void init(final EglBase.Context sharedContext,
RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
RendererCommon.GlDrawer drawer) {
synchronized (handlerLock) {
if (renderThreadHandler != null) {
throw new IllegalStateException(getResourceName() + "Already initialized");
@ -210,7 +211,8 @@ public class SurfaceViewRenderer extends SurfaceView
// when the EGL context is lost. It might be dangerous to delete them manually in
// Activity.onDestroy().
renderThreadHandler.postAtFrontOfQueue(new Runnable() {
@Override public void run() {
@Override
public void run() {
drawer.release();
drawer = null;
if (yuvTextures != null) {
@ -289,8 +291,7 @@ public class SurfaceViewRenderer extends SurfaceView
}
synchronized (handlerLock) {
if (renderThreadHandler == null) {
Logging.d(TAG, getResourceName()
+ "Dropping frame - Not initialized or already released.");
Logging.d(TAG, getResourceName() + "Dropping frame - Not initialized or already released.");
VideoRenderer.renderFrameDone(frame);
return;
}
@ -335,8 +336,8 @@ public class SurfaceViewRenderer extends SurfaceView
return;
}
desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
isNewSize = (desiredLayoutSize.x != getMeasuredWidth()
|| desiredLayoutSize.y != getMeasuredHeight());
isNewSize =
(desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight());
setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
}
if (isNewSize) {
@ -504,11 +505,11 @@ public class SurfaceViewRenderer extends SurfaceView
}
yuvUploader.uploadYuvData(
yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
0, 0, surfaceSize.x, surfaceSize.y);
drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
surfaceSize.x, surfaceSize.y);
} else {
drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
0, 0, surfaceSize.x, surfaceSize.y);
drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
surfaceSize.x, surfaceSize.y);
}

eglBase.swapBuffers();
@ -547,8 +548,8 @@ public class SurfaceViewRenderer extends SurfaceView
synchronized (layoutLock) {
if (frameWidth != frame.width || frameHeight != frame.height
|| frameRotation != frame.rotationDegree) {
Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
+ frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to " + frame.width
+ "x" + frame.height + " with rotation " + frame.rotationDegree);
if (rendererEvents != null) {
rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
}
@ -556,7 +557,8 @@ public class SurfaceViewRenderer extends SurfaceView
frameHeight = frame.height;
frameRotation = frame.rotationDegree;
post(new Runnable() {
@Override public void run() {
@Override
public void run() {
requestLayout();
}
});
@ -566,12 +568,12 @@ public class SurfaceViewRenderer extends SurfaceView

private void logStatistics() {
synchronized (statisticsLock) {
Logging.d(TAG, getResourceName() + "Frames received: "
+ framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
Logging.d(TAG, getResourceName() + "Frames received: " + framesReceived + ". Dropped: "
+ framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
" ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6)
+ " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, getResourceName() + "Average render time: "
+ (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
}

@ -24,14 +24,13 @@ public interface VideoCapturer {
void onCapturerStopped();

// Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
long timeStamp);
void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp);

// Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
// owned by VideoCapturer.
void onTextureFrameCaptured(
int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
long timestamp);
void onTextureFrameCaptured(int width, int height, int oesTextureId, float[] transformMatrix,
int rotation, long timestamp);
}

// An implementation of CapturerObserver that forwards all calls from
@ -55,25 +54,23 @@ public interface VideoCapturer {
}

@Override
public void onByteBufferFrameCaptured(byte[] data, int width, int height,
int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured(nativeSource, data, data.length, width, height, rotation,
timeStamp);
public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured(
nativeSource, data, data.length, width, height, rotation, timeStamp);
}

@Override
public void onTextureFrameCaptured(
int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
long timestamp) {
nativeOnTextureFrameCaptured(nativeSource, width, height, oesTextureId, transformMatrix,
rotation, timestamp);
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp) {
nativeOnTextureFrameCaptured(
nativeSource, width, height, oesTextureId, transformMatrix, rotation, timestamp);
}

private native void nativeCapturerStarted(long nativeSource,
boolean success);
private native void nativeCapturerStarted(long nativeSource, boolean success);
private native void nativeCapturerStopped(long nativeSource);
private native void nativeOnByteBufferFrameCaptured(long nativeSource,
byte[] data, int length, int width, int height, int rotation, long timeStamp);
private native void nativeOnByteBufferFrameCaptured(long nativeSource, byte[] data, int length,
int width, int height, int rotation, long timeStamp);
private native void nativeOnTextureFrameCaptured(long nativeSource, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
}

@ -40,9 +40,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
// the camera has been stopped.
// TODO(magjed): This class name is now confusing - rename to Camera1VideoCapturer.
@SuppressWarnings("deprecation")
public class VideoCapturerAndroid implements
CameraVideoCapturer,
android.hardware.Camera.PreviewCallback,
public class VideoCapturerAndroid
implements CameraVideoCapturer, android.hardware.Camera.PreviewCallback,
SurfaceTextureHelper.OnTextureFrameAvailableListener {
private static final String TAG = "VideoCapturerAndroid";
private static final int CAMERA_STOP_TIMEOUT_MS = 7000;
@ -98,15 +97,14 @@ public class VideoCapturerAndroid implements
}
};

public static VideoCapturerAndroid create(String name,
CameraEventsHandler eventsHandler) {
public static VideoCapturerAndroid create(String name, CameraEventsHandler eventsHandler) {
return VideoCapturerAndroid.create(name, eventsHandler, false /* captureToTexture */);
}

// Use ctor directly instead.
@Deprecated
public static VideoCapturerAndroid create(String name,
CameraEventsHandler eventsHandler, boolean captureToTexture) {
public static VideoCapturerAndroid create(
String name, CameraEventsHandler eventsHandler, boolean captureToTexture) {
try {
return new VideoCapturerAndroid(name, eventsHandler, captureToTexture);
} catch (RuntimeException e) {
@ -176,7 +174,8 @@ public class VideoCapturerAndroid implements
@Override
public void changeCaptureFormat(final int width, final int height, final int framerate) {
maybePostOnCameraThread(new Runnable() {
@Override public void run() {
@Override
public void run() {
startPreviewOnCameraThread(width, height, framerate);
}
});
@ -195,8 +194,8 @@ public class VideoCapturerAndroid implements
return isCapturingToTexture;
}

public VideoCapturerAndroid(String cameraName, CameraEventsHandler eventsHandler,
boolean captureToTexture) {
public VideoCapturerAndroid(
String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
if (android.hardware.Camera.getNumberOfCameras() == 0) {
throw new RuntimeException("No cameras available");
}
@ -332,8 +331,8 @@ public class VideoCapturerAndroid implements

camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());

Logging.d(TAG, "Camera orientation: " + info.orientation +
" .Device orientation: " + getDeviceOrientation());
Logging.d(TAG, "Camera orientation: " + info.orientation + " .Device orientation: "
+ getDeviceOrientation());
camera.setErrorCallback(cameraErrorCallback);
startPreviewOnCameraThread(width, height, framerate);
frameObserver.onCapturerStarted(true);
@ -343,7 +342,7 @@ public class VideoCapturerAndroid implements

// Start camera observer.
cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
} catch (IOException|RuntimeException e) {
} catch (IOException | RuntimeException e) {
Logging.e(TAG, "startCapture failed", e);
// Make sure the camera is released.
stopCaptureOnCameraThread(true /* stopHandler */);
@ -392,8 +391,7 @@ public class VideoCapturerAndroid implements
}

// Update camera parameters.
Logging.d(TAG, "isVideoStabilizationSupported: " +
parameters.isVideoStabilizationSupported());
Logging.d(TAG, "isVideoStabilizationSupported: " + parameters.isVideoStabilizationSupported());
if (parameters.isVideoStabilizationSupported()) {
parameters.setVideoStabilization(true);
}
@ -453,7 +451,8 @@ public class VideoCapturerAndroid implements
Logging.d(TAG, "stopCapture");
final CountDownLatch barrier = new CountDownLatch(1);
final boolean didPost = maybePostOnCameraThread(new Runnable() {
@Override public void run() {
@Override
public void run() {
stopCaptureOnCameraThread(true /* stopHandler */);
barrier.countDown();
}
@ -535,9 +534,8 @@ public class VideoCapturerAndroid implements
private int getDeviceOrientation() {
int orientation = 0;

WindowManager wm = (WindowManager) applicationContext.getSystemService(
Context.WINDOW_SERVICE);
switch(wm.getDefaultDisplay().getRotation()) {
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;
@ -579,8 +577,7 @@ public class VideoCapturerAndroid implements
throw new RuntimeException("Unexpected camera in callback!");
}

final long captureTimeNs =
TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

if (eventsHandler != null && !firstFrameReported) {
eventsHandler.onFirstFrameAvailable();
@ -588,14 +585,13 @@ public class VideoCapturerAndroid implements
}

cameraStatistics.addFrame();
frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
getFrameOrientation(), captureTimeNs);
frameObserver.onByteBufferFrameCaptured(
data, captureFormat.width, captureFormat.height, getFrameOrientation(), captureTimeNs);
camera.addCallbackBuffer(data);
}

@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread();
if (!isCameraRunning.get()) {
Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");

@ -61,11 +61,13 @@ public class VideoRenderer {
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
// bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
// matrix.
// clang-format off
samplingMatrix = new float[] {
1, 0, 0, 0,
0, -1, 0, 0,
0, 0, 1, 0,
0, 1, 0, 1};
// clang-format on
}

/**
@ -97,14 +99,13 @@ public class VideoRenderer {

@Override
public String toString() {
return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
":" + yuvStrides[2];
return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] + ":" + yuvStrides[2];
}
}

// Helper native function to do a video frame plane copying.
public static native void nativeCopyPlane(ByteBuffer src, int width,
int height, int srcStride, ByteBuffer dst, int dstStride);
public static native void nativeCopyPlane(
ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);

/** The real meat of VideoSinkInterface. */
public static interface Callbacks {

@ -78,7 +78,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// TODO(magjed): Delete GL resources in release(). Must be synchronized with draw(). We are
// currently leaking resources to avoid a rare crash in release() where the EGLContext has
// become invalid beforehand.
private int[] yuvTextures = { 0, 0, 0 };
private int[] yuvTextures = {0, 0, 0};
private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
private final RendererCommon.GlDrawer drawer;
// Resources for making a deep copy of incoming OES texture frame.
@ -90,7 +90,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private I420Frame pendingFrame;
private final Object pendingFrameLock = new Object();
// Type of video frame used for recent frame rendering.
private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE }

private RendererType rendererType;
private RendererCommon.ScalingType scalingType;
private boolean mirror;
@ -136,9 +137,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// it rendered up right.
private int rotationDegree;

private YuvImageRenderer(
GLSurfaceView surface, int id,
int x, int y, int width, int height,
private YuvImageRenderer(GLSurfaceView surface, int id, int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
this.surface = surface;
@ -167,8 +166,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}

private void createTextures() {
Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
Thread.currentThread().getId());
Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:"
+ Thread.currentThread().getId());

// Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
for (int i = 0; i < 3; i++) {
@ -179,14 +178,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}

private void updateLayoutMatrix() {
synchronized(updateLayoutLock) {
synchronized (updateLayoutLock) {
if (!updateLayoutProperties) {
return;
}
// Initialize to maximum allowed area. Round to integer coordinates inwards the layout
// bounding box (ceil left/top and floor right/bottom) to not break constraints.
displayLayout.set(
(screenWidth * layoutInPercentage.left + 99) / 100,
displayLayout.set((screenWidth * layoutInPercentage.left + 99) / 100,
(screenHeight * layoutInPercentage.top + 99) / 100,
(screenWidth * layoutInPercentage.right) / 100,
(screenHeight * layoutInPercentage.bottom) / 100);
@ -197,12 +195,12 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
? (float) videoWidth / videoHeight
: (float) videoHeight / videoWidth;
// Adjust display size based on |scalingType|.
final Point displaySize = RendererCommon.getDisplaySize(scalingType,
videoAspectRatio, displayLayout.width(), displayLayout.height());
final Point displaySize = RendererCommon.getDisplaySize(
scalingType, videoAspectRatio, displayLayout.width(), displayLayout.height());
displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
(displayLayout.height() - displaySize.y) / 2);
Logging.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
+ displayLayout.height());
Logging.d(TAG,
" Adjusted display size: " + displayLayout.width() + " x " + displayLayout.height());
layoutMatrix = RendererCommon.getLayoutMatrix(
mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
updateLayoutProperties = false;
@ -242,9 +240,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
GlUtil.checkNoGLES2Error("glBindFramebuffer");

// Copy the OES texture content. This will also normalize the sampling matrix.
drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
textureCopy.getWidth(), textureCopy.getHeight(),
0, 0, textureCopy.getWidth(), textureCopy.getHeight());
drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix, textureCopy.getWidth(),
textureCopy.getHeight(), 0, 0, textureCopy.getWidth(), textureCopy.getHeight());
rotatedSamplingMatrix = RendererCommon.identityMatrix();

// Restore normal framebuffer.
@ -263,8 +260,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// OpenGL defaults to lower left origin - flip viewport position vertically.
final int viewportY = screenHeight - displayLayout.bottom;
if (rendererType == RendererType.RENDERER_YUV) {
drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight,
displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight, displayLayout.left,
viewportY, displayLayout.width(), displayLayout.height());
} else {
drawer.drawRgb(textureCopy.getTextureId(), texMatrix, videoWidth, videoHeight,
displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
@ -281,25 +278,23 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {

private void logStatistics() {
long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
". Frames received: " + framesReceived +
". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
Logging.d(TAG, "ID: " + id + ". Type: " + rendererType + ". Frames received: "
+ framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
" ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, "Draw time: " +
(int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
(int) (copyTimeNs / (1000 * framesReceived)) + " us");
Logging.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) + " ms. FPS: "
+ framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, "Draw time: " + (int) (drawTimeNs / (1000 * framesRendered))
+ " us. Copy time: " + (int) (copyTimeNs / (1000 * framesReceived)) + " us");
}
}

public void setScreenSize(final int screenWidth, final int screenHeight) {
synchronized(updateLayoutLock) {
synchronized (updateLayoutLock) {
if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) {
return;
}
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " +
screenWidth + " x " + screenHeight);
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " + screenWidth + " x "
+ screenHeight);
this.screenWidth = screenWidth;
this.screenHeight = screenHeight;
updateLayoutProperties = true;
@ -310,14 +305,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
RendererCommon.ScalingType scalingType, boolean mirror) {
final Rect layoutInPercentage =
new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
synchronized(updateLayoutLock) {
synchronized (updateLayoutLock) {
if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
&& mirror == this.mirror) {
return;
}
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
") " + width + " x " + height + ". Scaling: " + scalingType +
". Mirror: " + mirror);
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y + ") "
+ width + " x " + height + ". Scaling: " + scalingType + ". Mirror: " + mirror);
this.layoutInPercentage.set(layoutInPercentage);
this.scalingType = scalingType;
this.mirror = mirror;
@ -331,14 +325,14 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
return;
}
if (rendererEvents != null) {
Logging.d(TAG, "ID: " + id +
". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight);
Logging.d(TAG, "ID: " + id + ". Reporting frame resolution changed to " + videoWidth + " x "
+ videoHeight);
rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
}

synchronized (updateLayoutLock) {
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
videoWidth + " x " + videoHeight + " rotation " + rotation);
Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " + videoWidth + " x "
+ videoHeight + " rotation " + rotation);

this.videoWidth = videoWidth;
this.videoHeight = videoHeight;
@ -366,11 +360,10 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
synchronized (pendingFrameLock) {
// Check input frame parameters.
if (frame.yuvFrame) {
if (frame.yuvStrides[0] < frame.width ||
frame.yuvStrides[1] < frame.width / 2 ||
frame.yuvStrides[2] < frame.width / 2) {
Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
if (frame.yuvStrides[0] < frame.width || frame.yuvStrides[1] < frame.width / 2
|| frame.yuvStrides[2] < frame.width / 2) {
Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " + frame.yuvStrides[1]
+ ", " + frame.yuvStrides[2]);
VideoRenderer.renderFrameDone(frame);
return;
}
@ -394,8 +387,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}

/** Passes GLSurfaceView to video renderer. */
public static synchronized void setView(GLSurfaceView surface,
Runnable eglContextReadyCallback) {
public static synchronized void setView(GLSurfaceView surface, Runnable eglContextReadyCallback) {
Logging.d(TAG, "VideoRendererGui.setView");
instance = new VideoRendererGui(surface);
eglContextReady = eglContextReadyCallback;
@ -407,7 +399,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {

/** Releases GLSurfaceView video renderer. */
public static synchronized void dispose() {
if (instance == null){
if (instance == null) {
return;
}
Logging.d(TAG, "VideoRendererGui.dispose");
@ -431,14 +423,12 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
*/
public static VideoRenderer createGui(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
YuvImageRenderer javaGuiRenderer = create(
x, y, width, height, scalingType, mirror);
YuvImageRenderer javaGuiRenderer = create(x, y, width, height, scalingType, mirror);
return new VideoRenderer(javaGuiRenderer);
}

public static VideoRenderer.Callbacks createGuiRenderer(
int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
int x, int y, int width, int height, RendererCommon.ScalingType scalingType, boolean mirror) {
return create(x, y, width, height, scalingType, mirror);
}

@ -447,8 +437,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
* resolution (width, height). All parameters are in percentage of
* screen resolution.
*/
public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
public static synchronized YuvImageRenderer create(
int x, int y, int width, int height, RendererCommon.ScalingType scalingType, boolean mirror) {
return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
}

@ -460,19 +450,16 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
// Check display region parameters.
if (x < 0 || x > 100 || y < 0 || y > 100 ||
width < 0 || width > 100 || height < 0 || height > 100 ||
x + width > 100 || y + height > 100) {
if (x < 0 || x > 100 || y < 0 || y > 100 || width < 0 || width > 100 || height < 0
|| height > 100 || x + width > 100 || y + height > 100) {
throw new RuntimeException("Incorrect window parameters.");
}

if (instance == null) {
throw new RuntimeException(
"Attempt to create yuv renderer before setting GLSurfaceView");
throw new RuntimeException("Attempt to create yuv renderer before setting GLSurfaceView");
}
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
instance.surface, instance.yuvImageRenderers.size(),
x, y, width, height, scalingType, mirror, drawer);
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(instance.surface,
instance.yuvImageRenderers.size(), x, y, width, height, scalingType, mirror, drawer);
synchronized (instance.yuvImageRenderers) {
if (instance.onSurfaceCreatedCalled) {
// onSurfaceCreated has already been called for VideoRendererGui -
@ -483,8 +470,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
@Override
public void run() {
yuvImageRenderer.createTextures();
yuvImageRenderer.setScreenSize(
instance.screenWidth, instance.screenHeight);
yuvImageRenderer.setScreenSize(instance.screenWidth, instance.screenHeight);
countDownLatch.countDown();
}
});
@ -501,13 +487,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
return yuvImageRenderer;
}

public static synchronized void update(
VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
public static synchronized void update(VideoRenderer.Callbacks renderer, int x, int y, int width,
int height, RendererCommon.ScalingType scalingType, boolean mirror) {
Logging.d(TAG, "VideoRendererGui.update");
if (instance == null) {
throw new RuntimeException(
"Attempt to update yuv renderer before setting GLSurfaceView");
throw new RuntimeException("Attempt to update yuv renderer before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@ -522,8 +506,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
Logging.d(TAG, "VideoRendererGui.setRendererEvents");
if (instance == null) {
throw new RuntimeException(
"Attempt to set renderer events before setting GLSurfaceView");
throw new RuntimeException("Attempt to set renderer events before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@ -537,8 +520,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public static synchronized void remove(VideoRenderer.Callbacks renderer) {
Logging.d(TAG, "VideoRendererGui.remove");
if (instance == null) {
throw new RuntimeException(
"Attempt to remove renderer before setting GLSurfaceView");
throw new RuntimeException("Attempt to remove renderer before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
final int index = instance.yuvImageRenderers.indexOf(renderer);
@ -553,8 +535,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public static synchronized void reset(VideoRenderer.Callbacks renderer) {
Logging.d(TAG, "VideoRendererGui.reset");
if (instance == null) {
throw new RuntimeException(
"Attempt to reset renderer before setting GLSurfaceView");
throw new RuntimeException("Attempt to reset renderer before setting GLSurfaceView");
}
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@ -621,8 +602,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {

@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
width + " x " + height + " ");
Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " + width + " x " + height + " ");
screenWidth = width;
screenHeight = height;
synchronized (yuvImageRenderers) {
@ -645,5 +625,4 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
}
}

}

@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/


package org.webrtc;

/**

@ -14,8 +14,7 @@ import java.util.LinkedList;

/** Java version of VideoTrackInterface. */
public class VideoTrack extends MediaStreamTrack {
private final LinkedList<VideoRenderer> renderers =
new LinkedList<VideoRenderer>();
private final LinkedList<VideoRenderer> renderers = new LinkedList<VideoRenderer>();

public VideoTrack(long nativeTrack) {
super(nativeTrack);
@ -43,9 +42,7 @@ public class VideoTrack extends MediaStreamTrack {

private static native void free(long nativeTrack);

private static native void nativeAddRenderer(
long nativeTrack, long nativeRenderer);
private static native void nativeAddRenderer(long nativeTrack, long nativeRenderer);

private static native void nativeRemoveRenderer(
long nativeTrack, long nativeRenderer);
private static native void nativeRemoveRenderer(long nativeTrack, long nativeRenderer);
}

@ -19,8 +19,7 @@ import android.test.suitebuilder.annotation.SmallTest;
public class Camera1CapturerUsingByteBufferTest extends InstrumentationTestCase {
static final String TAG = "Camera1CapturerUsingByteBufferTest";

private class TestObjectFactory
extends CameraVideoCapturerTestFixtures.TestObjectFactory {
private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
@Override
public boolean isCapturingToTexture() {
return false;

@ -19,8 +19,7 @@ import android.test.suitebuilder.annotation.SmallTest;
public class Camera1CapturerUsingTextureTest extends InstrumentationTestCase {
static final String TAG = "Camera1CapturerUsingTextureTest";

private class TestObjectFactory
extends CameraVideoCapturerTestFixtures.TestObjectFactory {
private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
@Override
public CameraEnumerator getCameraEnumerator() {
return new Camera1Enumerator();

@ -143,8 +143,7 @@ public class Camera2CapturerTest extends InstrumentationTestCase {
}
}

private class TestObjectFactory
extends CameraVideoCapturerTestFixtures.TestObjectFactory {
private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
@Override
public CameraEnumerator getCameraEnumerator() {
return new Camera2Enumerator(getAppContext());

@ -114,8 +114,8 @@ class CameraVideoCapturerTestFixtures {
}

@Override
public void onByteBufferFrameCaptured(byte[] frame, int width, int height, int rotation,
long timeStamp) {
public void onByteBufferFrameCaptured(
byte[] frame, int width, int height, int rotation, long timeStamp) {
synchronized (frameLock) {
++framesCaptured;
frameSize = frame.length;
@ -126,9 +126,8 @@ class CameraVideoCapturerTestFixtures {
}
}
@Override
public void onTextureFrameCaptured(
int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
long timeStamp) {
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timeStamp) {
synchronized (frameLock) {
++framesCaptured;
frameWidth = width;
@ -182,8 +181,7 @@ class CameraVideoCapturerTestFixtures {
}
}

static class CameraEvents implements
CameraVideoCapturer.CameraEventsHandler {
static class CameraEvents implements CameraVideoCapturer.CameraEventsHandler {
public boolean onCameraOpeningCalled;
public boolean onFirstFrameAvailableCalled;
public final Object onCameraFreezedLock = new Object();
@ -275,8 +273,7 @@ class CameraVideoCapturerTestFixtures {
}

public CameraVideoCapturer createCapturer(
String name,
CameraVideoCapturer.CameraEventsHandler eventsHandler) {
String name, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return cameraEnumerator.createCapturer(name, eventsHandler);
}

@ -374,8 +371,8 @@ class CameraVideoCapturerTestFixtures {
instance.surfaceTextureHelper.dispose();
}

private VideoTrackWithRenderer createVideoTrackWithRenderer(CameraVideoCapturer capturer,
VideoRenderer.Callbacks rendererCallbacks) {
private VideoTrackWithRenderer createVideoTrackWithRenderer(
CameraVideoCapturer capturer, VideoRenderer.Callbacks rendererCallbacks) {
VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
videoTrackWithRenderer.source = peerConnectionFactory.createVideoSource(capturer);
capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
@ -410,7 +407,8 @@ class CameraVideoCapturerTestFixtures {
private void waitUntilIdle(CapturerInstance capturerInstance) throws InterruptedException {
final CountDownLatch barrier = new CountDownLatch(1);
capturerInstance.surfaceTextureHelper.getHandler().post(new Runnable() {
@Override public void run() {
@Override
public void run() {
barrier.countDown();
}
});
@ -461,8 +459,8 @@ class CameraVideoCapturerTestFixtures {

public void switchCamera() throws InterruptedException {
if (!testObjectFactory.haveTwoCameras()) {
Logging.w(TAG,
"Skipping test switch video capturer because the device doesn't have two cameras.");
Logging.w(
TAG, "Skipping test switch video capturer because the device doesn't have two cameras.");
return;
}

@ -547,23 +545,23 @@ class CameraVideoCapturerTestFixtures {
public void startStopWithDifferentResolutions() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);

for(int i = 0; i < 3 ; ++i) {
for (int i = 0; i < 3; ++i) {
startCapture(capturerInstance, i);
assertTrue(capturerInstance.observer.waitForCapturerToStart());
capturerInstance.observer.waitForNextCapturedFrame();

// Check the frame size. The actual width and height depend on how the capturer is mounted.
final boolean identicalResolution = (
capturerInstance.observer.frameWidth() == capturerInstance.format.width
final boolean identicalResolution =
(capturerInstance.observer.frameWidth() == capturerInstance.format.width
&& capturerInstance.observer.frameHeight() == capturerInstance.format.height);
final boolean flippedResolution = (
capturerInstance.observer.frameWidth() == capturerInstance.format.height
final boolean flippedResolution =
(capturerInstance.observer.frameWidth() == capturerInstance.format.height
&& capturerInstance.observer.frameHeight() == capturerInstance.format.width);
if (!identicalResolution && !flippedResolution) {
fail("Wrong resolution, got: "
+ capturerInstance.observer.frameWidth() + "x" + capturerInstance.observer.frameHeight()
+ " expected: "+ capturerInstance.format.width + "x" + capturerInstance.format.height
+ " or " + capturerInstance.format.height + "x" + capturerInstance.format.width);
fail("Wrong resolution, got: " + capturerInstance.observer.frameWidth() + "x"
+ capturerInstance.observer.frameHeight() + " expected: "
+ capturerInstance.format.width + "x" + capturerInstance.format.height + " or "
+ capturerInstance.format.height + "x" + capturerInstance.format.width);
}

if (testObjectFactory.isCapturingToTexture()) {
@ -600,8 +598,7 @@ class CameraVideoCapturerTestFixtures {
disposeCapturer(capturerInstance);
}

public void returnBufferLateEndToEnd()
throws InterruptedException {
public void returnBufferLateEndToEnd() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);

@ -53,7 +53,8 @@ public final class GlRectDrawerTest extends ActivityTestCase {
fail("ByteBuffers of size " + width + "x" + height + " not equal at position "
+ "(" + x + ", " + y + "). Expected color (R,G,B): "
+ "(" + expectedR + ", " + expectedG + ", " + expectedB + ")"
+ " but was: " + "(" + actualR + ", " + actualG + ", " + actualB + ").");
+ " but was: "
+ "(" + actualR + ", " + actualG + ", " + actualB + ").");
}
}
}
@ -92,14 +93,14 @@ public final class GlRectDrawerTest extends ActivityTestCase {
final int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
GLES20.GL_UNSIGNED_BYTE, rgbPlane);
GlUtil.checkNoGLES2Error("glTexImage2D");

// Draw the RGB frame onto the pixel buffer.
final GlRectDrawer drawer = new GlRectDrawer();
drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
0 /* viewportY */, WIDTH, HEIGHT);

// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@ -139,15 +140,15 @@ public final class GlRectDrawerTest extends ActivityTestCase {
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH,
HEIGHT, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH, HEIGHT, 0,
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
GlUtil.checkNoGLES2Error("glTexImage2D");
}

// Draw the YUV frame onto the pixel buffer.
final GlRectDrawer drawer = new GlRectDrawer();
drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
0 /* viewportY */, WIDTH, HEIGHT);

// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@ -212,8 +213,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
private final int rgbTexture;

public StubOesTextureProducer(
EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width,
int height) {
EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width, int height) {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
surfaceTexture.setDefaultBufferSize(width, height);
eglBase.createSurface(surfaceTexture);
@ -232,8 +232,8 @@ public final class GlRectDrawerTest extends ActivityTestCase {
// Upload RGB data to texture.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
GLES20.GL_UNSIGNED_BYTE, rgbPlane);
// Draw the RGB data onto the SurfaceTexture.
drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);

@ -28,8 +28,7 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
@SmallTest
public static void testInitializeUsingByteBuffer() {
if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
Log.i(TAG,
"Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
return;
}
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
@ -46,9 +45,8 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
}
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
assertTrue(encoder.initEncode(
MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
eglBase.getEglBaseContext()));
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480,
300, 30, eglBase.getEglBaseContext()));
encoder.release();
eglBase.release();
}
@ -61,13 +59,11 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
}
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
assertTrue(encoder.initEncode(
MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
null));
MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null));
encoder.release();
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
assertTrue(encoder.initEncode(
MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
eglBase.getEglBaseContext()));
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480,
300, 30, eglBase.getEglBaseContext()));
encoder.release();
eglBase.release();
}
@ -137,11 +133,10 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {

MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();

assertTrue(encoder.initEncode(
MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30,
eglOesBase.getEglBaseContext()));
assertTrue(encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(),
presentationTs));
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width,
height, 300, 30, eglOesBase.getEglBaseContext()));
assertTrue(
encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(), presentationTs));
GlUtil.checkNoGLES2Error("encodeTexture");

// It should be Ok to delete the texture after calling encodeTexture.

@ -37,8 +37,7 @@ public class NetworkMonitorTest extends ActivityTestCase {
/**
* Listens for alerts fired by the NetworkMonitor when network status changes.
*/
private static class NetworkMonitorTestObserver
implements NetworkMonitor.NetworkObserver {
private static class NetworkMonitorTestObserver implements NetworkMonitor.NetworkObserver {
private boolean receivedNotification = false;

@Override
@ -118,7 +117,6 @@ public class NetworkMonitorTest extends ActivityTestCase {
// A dummy NetworkMonitorAutoDetect.Observer.
private static class TestNetworkMonitorAutoDetectObserver
implements NetworkMonitorAutoDetect.Observer {

@Override
public void onConnectionTypeChanged(ConnectionType newConnectionType) {}

@ -138,7 +136,7 @@ public class NetworkMonitorTest extends ActivityTestCase {

private static Handler getUiThreadHandler() {
synchronized (lock) {
if (uiThreadHandler == null ) {
if (uiThreadHandler == null) {
uiThreadHandler = new Handler(Looper.getMainLooper());
}
return uiThreadHandler;
@ -165,8 +163,7 @@ public class NetworkMonitorTest extends ActivityTestCase {
}

private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
final NetworkMonitorAutoDetect.NetworkState networkState =
receiver.getCurrentNetworkState();
final NetworkMonitorAutoDetect.NetworkState networkState = receiver.getCurrentNetworkState();
return receiver.getConnectionType(networkState);
}


@ -45,8 +45,7 @@ public class PeerConnectionTest extends ActivityTestCase {

private static class ObserverExpectations implements PeerConnection.Observer,
VideoRenderer.Callbacks,
DataChannel.Observer,
StatsObserver {
DataChannel.Observer, StatsObserver {
private final String name;
private int expectedIceCandidates = 0;
private int expectedErrors = 0;
@ -54,32 +53,24 @@ public class PeerConnectionTest extends ActivityTestCase {
private int expectedWidth = 0;
private int expectedHeight = 0;
private int expectedFramesDelivered = 0;
private LinkedList<SignalingState> expectedSignalingChanges =
new LinkedList<SignalingState>();
private LinkedList<SignalingState> expectedSignalingChanges = new LinkedList<SignalingState>();
private LinkedList<IceConnectionState> expectedIceConnectionChanges =
new LinkedList<IceConnectionState>();
private LinkedList<IceGatheringState> expectedIceGatheringChanges =
new LinkedList<IceGatheringState>();
private LinkedList<String> expectedAddStreamLabels =
new LinkedList<String>();
private LinkedList<String> expectedRemoveStreamLabels =
new LinkedList<String>();
private final LinkedList<IceCandidate> gotIceCandidates =
new LinkedList<IceCandidate>();
private LinkedList<String> expectedAddStreamLabels = new LinkedList<String>();
private LinkedList<String> expectedRemoveStreamLabels = new LinkedList<String>();
private final LinkedList<IceCandidate> gotIceCandidates = new LinkedList<IceCandidate>();
private Map<MediaStream, WeakReference<VideoRenderer>> renderers =
new IdentityHashMap<MediaStream, WeakReference<VideoRenderer>>();
private DataChannel dataChannel;
private LinkedList<DataChannel.Buffer> expectedBuffers =
new LinkedList<DataChannel.Buffer>();
private LinkedList<DataChannel.Buffer> expectedBuffers = new LinkedList<DataChannel.Buffer>();
private LinkedList<DataChannel.State> expectedStateChanges =
new LinkedList<DataChannel.State>();
private LinkedList<String> expectedRemoteDataChannelLabels =
new LinkedList<String>();
private LinkedList<String> expectedRemoteDataChannelLabels = new LinkedList<String>();
private int expectedStatsCallbacks = 0;
private LinkedList<StatsReport[]> gotStatsReports =
new LinkedList<StatsReport[]>();
private final HashSet<MediaStream> gotRemoteStreams =
new HashSet<MediaStream>();
private LinkedList<StatsReport[]> gotStatsReports = new LinkedList<StatsReport[]>();
private final HashSet<MediaStream> gotRemoteStreams = new HashSet<MediaStream>();

public ObserverExpectations(String name) {
this.name = name;
@ -140,14 +131,12 @@ public class PeerConnectionTest extends ActivityTestCase {
assertEquals(expectedSignalingChanges.removeFirst(), newState);
}

public synchronized void expectIceConnectionChange(
IceConnectionState newState) {
public synchronized void expectIceConnectionChange(IceConnectionState newState) {
expectedIceConnectionChanges.add(newState);
}

@Override
public synchronized void onIceConnectionChange(
IceConnectionState newState) {
public synchronized void onIceConnectionChange(IceConnectionState newState) {
// TODO(bemasc): remove once delivery of ICECompleted is reliable
// (https://code.google.com/p/webrtc/issues/detail?id=3021).
if (newState.equals(IceConnectionState.COMPLETED)) {
@ -167,8 +156,7 @@ public class PeerConnectionTest extends ActivityTestCase {
System.out.println(name + "Got an ice connection receiving change " + receiving);
}

public synchronized void expectIceGatheringChange(
IceGatheringState newState) {
public synchronized void expectIceGatheringChange(IceGatheringState newState) {
expectedIceGatheringChanges.add(newState);
}

@ -198,8 +186,7 @@ public class PeerConnectionTest extends ActivityTestCase {
assertEquals("audio", stream.audioTracks.get(0).kind());
VideoRenderer renderer = createVideoRenderer(this);
stream.videoTracks.get(0).addRenderer(renderer);
assertNull(renderers.put(
stream, new WeakReference<VideoRenderer>(renderer)));
assertNull(renderers.put(stream, new WeakReference<VideoRenderer>(renderer)));
gotRemoteStreams.add(stream);
}

@ -224,8 +211,7 @@ public class PeerConnectionTest extends ActivityTestCase {

@Override
public synchronized void onDataChannel(DataChannel remoteDataChannel) {
assertEquals(expectedRemoteDataChannelLabels.removeFirst(),
remoteDataChannel.label());
assertEquals(expectedRemoteDataChannelLabels.removeFirst(), remoteDataChannel.label());
setDataChannel(remoteDataChannel);
assertEquals(DataChannel.State.CONNECTING, dataChannel.state());
}
@ -239,10 +225,8 @@ public class PeerConnectionTest extends ActivityTestCase {
assertTrue(--expectedRenegotiations >= 0);
}

public synchronized void expectMessage(ByteBuffer expectedBuffer,
boolean expectedBinary) {
expectedBuffers.add(
new DataChannel.Buffer(expectedBuffer, expectedBinary));
public synchronized void expectMessage(ByteBuffer expectedBuffer, boolean expectedBinary) {
expectedBuffers.add(new DataChannel.Buffer(expectedBuffer, expectedBinary));
}

@Override
@ -299,12 +283,12 @@ public class PeerConnectionTest extends ActivityTestCase {
"expectedSignalingChanges: " + expectedSignalingChanges.size());
}
if (expectedIceConnectionChanges.size() != 0) {
stillWaitingForExpectations.add("expectedIceConnectionChanges: " +
expectedIceConnectionChanges.size());
stillWaitingForExpectations.add(
"expectedIceConnectionChanges: " + expectedIceConnectionChanges.size());
}
if (expectedIceGatheringChanges.size() != 0) {
stillWaitingForExpectations.add("expectedIceGatheringChanges: " +
expectedIceGatheringChanges.size());
stillWaitingForExpectations.add(
"expectedIceGatheringChanges: " + expectedIceGatheringChanges.size());
}
if (expectedAddStreamLabels.size() != 0) {
stillWaitingForExpectations.add(
@ -315,24 +299,20 @@ public class PeerConnectionTest extends ActivityTestCase {
"expectedRemoveStreamLabels: " + expectedRemoveStreamLabels.size());
}
if (expectedFramesDelivered > 0) {
stillWaitingForExpectations.add(
"expectedFramesDelivered: " + expectedFramesDelivered);
stillWaitingForExpectations.add("expectedFramesDelivered: " + expectedFramesDelivered);
}
if (!expectedBuffers.isEmpty()) {
stillWaitingForExpectations.add(
"expectedBuffers: " + expectedBuffers.size());
stillWaitingForExpectations.add("expectedBuffers: " + expectedBuffers.size());
}
if (!expectedStateChanges.isEmpty()) {
stillWaitingForExpectations.add(
"expectedStateChanges: " + expectedStateChanges.size());
stillWaitingForExpectations.add("expectedStateChanges: " + expectedStateChanges.size());
}
if (!expectedRemoteDataChannelLabels.isEmpty()) {
stillWaitingForExpectations.add("expectedRemoteDataChannelLabels: " +
expectedRemoteDataChannelLabels.size());
stillWaitingForExpectations.add(
"expectedRemoteDataChannelLabels: " + expectedRemoteDataChannelLabels.size());
}
if (expectedStatsCallbacks != 0) {
stillWaitingForExpectations.add(
"expectedStatsCallbacks: " + expectedStatsCallbacks);
stillWaitingForExpectations.add("expectedStatsCallbacks: " + expectedStatsCallbacks);
}
return stillWaitingForExpectations;
}
@ -352,11 +332,8 @@ public class PeerConnectionTest extends ActivityTestCase {
TreeSet<String> stillWaitingForExpectations = unsatisfiedExpectations();
while (!stillWaitingForExpectations.isEmpty()) {
if (!stillWaitingForExpectations.equals(prev)) {
System.out.println(
name + " still waiting at\n " +
(new Throwable()).getStackTrace()[1] +
"\n for: " +
Arrays.toString(stillWaitingForExpectations.toArray()));
System.out.println(name + " still waiting at\n " + (new Throwable()).getStackTrace()[1]
+ "\n for: " + Arrays.toString(stillWaitingForExpectations.toArray()));
}
if (endTime < System.currentTimeMillis()) {
System.out.println(name + " timed out waiting for: "
@ -372,8 +349,8 @@ public class PeerConnectionTest extends ActivityTestCase {
stillWaitingForExpectations = unsatisfiedExpectations();
}
if (prev == null) {
System.out.println(name + " didn't need to wait at\n " +
(new Throwable()).getStackTrace()[1]);
System.out.println(
name + " didn't need to wait at\n " + (new Throwable()).getStackTrace()[1]);
}
return true;
}
@ -464,21 +441,17 @@ public class PeerConnectionTest extends ActivityTestCase {

static int videoWindowsMapped = -1;

private static VideoRenderer createVideoRenderer(
VideoRenderer.Callbacks videoCallbacks) {
private static VideoRenderer createVideoRenderer(VideoRenderer.Callbacks videoCallbacks) {
return new VideoRenderer(videoCallbacks);
}

// Return a weak reference to test that ownership is correctly held by
// PeerConnection, not by test code.
private static WeakReference<MediaStream> addTracksToPC(
PeerConnectionFactory factory, PeerConnection pc,
VideoSource videoSource,
String streamLabel, String videoTrackId, String audioTrackId,
VideoRenderer.Callbacks videoCallbacks) {
private static WeakReference<MediaStream> addTracksToPC(PeerConnectionFactory factory,
PeerConnection pc, VideoSource videoSource, String streamLabel, String videoTrackId,
String audioTrackId, VideoRenderer.Callbacks videoCallbacks) {
MediaStream lMS = factory.createLocalMediaStream(streamLabel);
VideoTrack videoTrack =
factory.createVideoTrack(videoTrackId, videoSource);
VideoTrack videoTrack = factory.createVideoTrack(videoTrackId, videoSource);
assertNotNull(videoTrack);
VideoRenderer videoRenderer = createVideoRenderer(videoCallbacks);
assertNotNull(videoRenderer);
@ -487,8 +460,8 @@ public class PeerConnectionTest extends ActivityTestCase {
// Just for fun, let's remove and re-add the track.
lMS.removeTrack(videoTrack);
lMS.addTrack(videoTrack);
lMS.addTrack(factory.createAudioTrack(
audioTrackId, factory.createAudioSource(new MediaConstraints())));
lMS.addTrack(
factory.createAudioTrack(audioTrackId, factory.createAudioSource(new MediaConstraints())));
pc.addStream(lMS);
return new WeakReference<MediaStream>(lMS);
}
@ -544,25 +517,20 @@ public class PeerConnectionTest extends ActivityTestCase {
// Logging.Severity.LS_SENSITIVE);

MediaConstraints pcConstraints = new MediaConstraints();
pcConstraints.mandatory.add(
new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));

LinkedList<PeerConnection.IceServer> iceServers =
new LinkedList<PeerConnection.IceServer>();
iceServers.add(new PeerConnection.IceServer(
"stun:stun.l.google.com:19302"));
iceServers.add(new PeerConnection.IceServer(
"turn:fake.example.com", "fakeUsername", "fakePassword"));
ObserverExpectations offeringExpectations =
new ObserverExpectations("PCTest:offerer");
PeerConnection offeringPC = factory.createPeerConnection(
iceServers, pcConstraints, offeringExpectations);
LinkedList<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));
iceServers.add(
new PeerConnection.IceServer("turn:fake.example.com", "fakeUsername", "fakePassword"));
ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
PeerConnection offeringPC =
factory.createPeerConnection(iceServers, pcConstraints, offeringExpectations);
assertNotNull(offeringPC);

ObserverExpectations answeringExpectations =
new ObserverExpectations("PCTest:answerer");
PeerConnection answeringPC = factory.createPeerConnection(
iceServers, pcConstraints, answeringExpectations);
ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
PeerConnection answeringPC =
factory.createPeerConnection(iceServers, pcConstraints, answeringExpectations);
assertNotNull(answeringPC);

// We want to use the same camera for offerer & answerer, so create it here
@ -574,14 +542,12 @@ public class PeerConnectionTest extends ActivityTestCase {
videoCapturer.startCapture(640, 480, 30);

offeringExpectations.expectRenegotiationNeeded();
WeakReference<MediaStream> oLMS = addTracksToPC(
factory, offeringPC, videoSource, "offeredMediaStream",
"offeredVideoTrack", "offeredAudioTrack",
new ExpectedResolutionSetter(answeringExpectations));
WeakReference<MediaStream> oLMS =
addTracksToPC(factory, offeringPC, videoSource, "offeredMediaStream", "offeredVideoTrack",
"offeredAudioTrack", new ExpectedResolutionSetter(answeringExpectations));

offeringExpectations.expectRenegotiationNeeded();
DataChannel offeringDC = offeringPC.createDataChannel(
"offeringDC", new DataChannel.Init());
DataChannel offeringDC = offeringPC.createDataChannel("offeringDC", new DataChannel.Init());
assertEquals("offeringDC", offeringDC.label());

offeringExpectations.setDataChannel(offeringDC);
@ -593,22 +559,19 @@ public class PeerConnectionTest extends ActivityTestCase {
assertFalse(offerSdp.description.isEmpty());

sdpLatch = new SdpObserverLatch();
answeringExpectations.expectSignalingChange(
SignalingState.HAVE_REMOTE_OFFER);
answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
answeringExpectations.expectAddStream("offeredMediaStream");
// SCTP DataChannels are announced via OPEN messages over the established
// connection (not via SDP), so answeringExpectations can only register
// expecting the channel during ICE, below.
answeringPC.setRemoteDescription(sdpLatch, offerSdp);
assertEquals(
PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
assertTrue(sdpLatch.await());
assertNull(sdpLatch.getSdp());

answeringExpectations.expectRenegotiationNeeded();
WeakReference<MediaStream> aLMS = addTracksToPC(
factory, answeringPC, videoSource, "answeredMediaStream",
"answeredVideoTrack", "answeredAudioTrack",
WeakReference<MediaStream> aLMS = addTracksToPC(factory, answeringPC, videoSource,
"answeredMediaStream", "answeredVideoTrack", "answeredAudioTrack",
new ExpectedResolutionSetter(offeringExpectations));

sdpLatch = new SdpObserverLatch();
@ -639,19 +602,15 @@ public class PeerConnectionTest extends ActivityTestCase {
offeringExpectations.expectSignalingChange(SignalingState.STABLE);
offeringExpectations.expectAddStream("answeredMediaStream");

offeringExpectations.expectIceConnectionChange(
IceConnectionState.CHECKING);
offeringExpectations.expectIceConnectionChange(
IceConnectionState.CONNECTED);
offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
// TODO(bemasc): uncomment once delivery of ICECompleted is reliable
// (https://code.google.com/p/webrtc/issues/detail?id=3021).
//
// offeringExpectations.expectIceConnectionChange(
// IceConnectionState.COMPLETED);
answeringExpectations.expectIceConnectionChange(
IceConnectionState.CHECKING);
answeringExpectations.expectIceConnectionChange(
IceConnectionState.CONNECTED);
answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);

offeringPC.setRemoteDescription(sdpLatch, answerSdp);
assertTrue(sdpLatch.await());
@ -667,7 +626,6 @@ public class PeerConnectionTest extends ActivityTestCase {
assertEquals(answeringPC.getSenders().size(), 2);
assertEquals(answeringPC.getReceivers().size(), 2);


// Wait for at least some frames to be delivered at each end (number
// chosen arbitrarily).
offeringExpectations.expectFramesDelivered(10);
@ -693,10 +651,8 @@ public class PeerConnectionTest extends ActivityTestCase {
assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));

assertEquals(
PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
assertEquals(
PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());

// Set a bitrate limit for the outgoing video stream for the offerer.
RtpSender videoSender = null;
@ -721,8 +677,8 @@ public class PeerConnectionTest extends ActivityTestCase {
// Test send & receive UTF-8 text.
answeringExpectations.expectMessage(
ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
DataChannel.Buffer buffer = new DataChannel.Buffer(
ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
DataChannel.Buffer buffer =
new DataChannel.Buffer(ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
assertTrue(offeringExpectations.dataChannel.send(buffer));
assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));

@ -735,8 +691,7 @@ public class PeerConnectionTest extends ActivityTestCase {
expectedBinaryMessage.flip();
offeringExpectations.expectMessage(expectedBinaryMessage, true);
assertTrue(answeringExpectations.dataChannel.send(
new DataChannel.Buffer(
ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 }), true)));
new DataChannel.Buffer(ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5}), true)));
assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));

offeringExpectations.expectStateChange(DataChannel.State.CLOSING);
@ -768,24 +723,19 @@ public class PeerConnectionTest extends ActivityTestCase {
PeerConnectionFactory factory = new PeerConnectionFactory(options);

MediaConstraints pcConstraints = new MediaConstraints();
pcConstraints.mandatory.add(
new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));

LinkedList<PeerConnection.IceServer> iceServers =
new LinkedList<PeerConnection.IceServer>();
iceServers.add(new PeerConnection.IceServer(
"stun:stun.l.google.com:19302"));
LinkedList<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));

ObserverExpectations offeringExpectations =
new ObserverExpectations("PCTest:offerer");
PeerConnection offeringPC = factory.createPeerConnection(
iceServers, pcConstraints, offeringExpectations);
ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
PeerConnection offeringPC =
factory.createPeerConnection(iceServers, pcConstraints, offeringExpectations);
assertNotNull(offeringPC);

ObserverExpectations answeringExpectations =
new ObserverExpectations("PCTest:answerer");
PeerConnection answeringPC = factory.createPeerConnection(
iceServers, pcConstraints, answeringExpectations);
ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
PeerConnection answeringPC =
factory.createPeerConnection(iceServers, pcConstraints, answeringExpectations);
assertNotNull(answeringPC);

// We want to use the same camera for offerer & answerer, so create it here
@ -798,10 +748,9 @@ public class PeerConnectionTest extends ActivityTestCase {

// Add offerer media stream.
offeringExpectations.expectRenegotiationNeeded();
WeakReference<MediaStream> oLMS = addTracksToPC(
factory, offeringPC, videoSource, "offeredMediaStream",
"offeredVideoTrack", "offeredAudioTrack",
new ExpectedResolutionSetter(answeringExpectations));
WeakReference<MediaStream> oLMS =
addTracksToPC(factory, offeringPC, videoSource, "offeredMediaStream", "offeredVideoTrack",
"offeredAudioTrack", new ExpectedResolutionSetter(answeringExpectations));

// Create offer.
SdpObserverLatch sdpLatch = new SdpObserverLatch();
@ -822,8 +771,7 @@ public class PeerConnectionTest extends ActivityTestCase {

// Set remote description for answerer.
sdpLatch = new SdpObserverLatch();
answeringExpectations.expectSignalingChange(
SignalingState.HAVE_REMOTE_OFFER);
answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
answeringExpectations.expectAddStream("offeredMediaStream");
answeringPC.setRemoteDescription(sdpLatch, offerSdp);
assertTrue(sdpLatch.await());
@ -831,9 +779,8 @@ public class PeerConnectionTest extends ActivityTestCase {

// Add answerer media stream.
answeringExpectations.expectRenegotiationNeeded();
WeakReference<MediaStream> aLMS = addTracksToPC(
factory, answeringPC, videoSource, "answeredMediaStream",
"answeredVideoTrack", "answeredAudioTrack",
WeakReference<MediaStream> aLMS = addTracksToPC(factory, answeringPC, videoSource,
"answeredMediaStream", "answeredVideoTrack", "answeredAudioTrack",
new ExpectedResolutionSetter(offeringExpectations));

// Create answer.
@ -858,19 +805,15 @@ public class PeerConnectionTest extends ActivityTestCase {
offeringExpectations.expectSignalingChange(SignalingState.STABLE);
offeringExpectations.expectAddStream("answeredMediaStream");

offeringExpectations.expectIceConnectionChange(
IceConnectionState.CHECKING);
offeringExpectations.expectIceConnectionChange(
IceConnectionState.CONNECTED);
offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
// TODO(bemasc): uncomment once delivery of ICECompleted is reliable
// (https://code.google.com/p/webrtc/issues/detail?id=3021).
//
// offeringExpectations.expectIceConnectionChange(
// IceConnectionState.COMPLETED);
answeringExpectations.expectIceConnectionChange(
IceConnectionState.CHECKING);
answeringExpectations.expectIceConnectionChange(
IceConnectionState.CONNECTED);
answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);

offeringPC.setRemoteDescription(sdpLatch, answerSdp);
assertTrue(sdpLatch.await());
@ -897,10 +840,8 @@ public class PeerConnectionTest extends ActivityTestCase {
assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));

assertEquals(
PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
assertEquals(
PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());

// Now do another negotiation, removing the video track from one peer.
// This previously caused a crash on pc.dispose().
@ -927,8 +868,7 @@ public class PeerConnectionTest extends ActivityTestCase {

// Set remote description for answerer.
sdpLatch = new SdpObserverLatch();
answeringExpectations.expectSignalingChange(
SignalingState.HAVE_REMOTE_OFFER);
answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
answeringPC.setRemoteDescription(sdpLatch, offerSdp);
assertTrue(sdpLatch.await());
assertNull(sdpLatch.getSdp());
@ -1041,8 +981,7 @@ public class PeerConnectionTest extends ActivityTestCase {
assertTrue(info.samples.size() > 0);
}

private static void shutdownPC(
PeerConnection pc, ObserverExpectations expectations) {
private static void shutdownPC(PeerConnection pc, ObserverExpectations expectations) {
if (expectations.dataChannel != null) {
expectations.dataChannel.unregisterObserver();
expectations.dataChannel.dispose();

@ -95,11 +95,13 @@ public final class RendererCommonTest extends ActivityTestCase {
// Assert:
// u' = u.
// v' = v.
// clang-format off
MoreAsserts.assertEquals(new double[] {
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1}, round(layoutMatrix));
// clang-format on
}

@SmallTest
@ -108,11 +110,13 @@ public final class RendererCommonTest extends ActivityTestCase {
// Assert:
// u' = 1 - u.
// v' = v.
// clang-format off
MoreAsserts.assertEquals(new double[] {
-1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
1, 0, 0, 1}, round(layoutMatrix));
// clang-format on
}

@SmallTest
@ -123,22 +127,26 @@ public final class RendererCommonTest extends ActivityTestCase {
// Assert:
// u' = 0.25 + 0.5 u.
// v' = v.
// clang-format off
MoreAsserts.assertEquals(new double[] {
0.5, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0.25, 0, 0, 1}, round(layoutMatrix));
// clang-format on
}

@SmallTest
public static void testRotateTextureMatrixDefault() {
// Test that rotation with 0 degrees returns an identical matrix.
// clang-format off
final float[] matrix = new float[] {
1, 2, 3, 4,
5, 6, 7, 8,
9, 0, 1, 2,
3, 4, 5, 6
};
// clang-format on
final float rotatedMatrix[] = rotateTextureMatrix(matrix, 0);
MoreAsserts.assertEquals(round(matrix), round(rotatedMatrix));
}
@ -149,11 +157,13 @@ public final class RendererCommonTest extends ActivityTestCase {
// Assert:
// u' = 1 - v.
// v' = u.
// clang-format off
MoreAsserts.assertEquals(new double[] {
0, 1, 0, 0,
-1, 0, 0, 0,
0, 0, 1, 0,
1, 0, 0, 1}, round(samplingMatrix));
// clang-format on
}

@SmallTest
@ -162,10 +172,12 @@ public final class RendererCommonTest extends ActivityTestCase {
// Assert:
// u' = 1 - u.
// v' = 1 - v.
// clang-format off
MoreAsserts.assertEquals(new double[] {
-1, 0, 0, 0,
0, -1, 0, 0,
0, 0, 1, 0,
1, 1, 0, 1}, round(samplingMatrix));
// clang-format on
}
}

@ -110,8 +110,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {

// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface.
final EglBase eglOesBase =
EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglOesBase.surfaceWidth(), width);
assertEquals(eglOesBase.surfaceHeight(), height);
@ -131,8 +130,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Wait for an OES texture to arrive and draw it onto the pixel buffer.
listener.waitForNewFrame();
eglBase.makeCurrent();
drawer.drawOes(listener.oesTextureId, listener.transformMatrix, width, height,
0, 0, width, height);
drawer.drawOes(
listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);

surfaceTextureHelper.returnTextureFrame();

@ -178,8 +177,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {

// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface.
final EglBase eglOesBase =
EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglOesBase.surfaceWidth(), width);
assertEquals(eglOesBase.surfaceHeight(), height);
@ -203,8 +201,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Draw the pending texture frame onto the pixel buffer.
eglBase.makeCurrent();
final GlRectDrawer drawer = new GlRectDrawer();
drawer.drawOes(listener.oesTextureId, listener.transformMatrix, width, height,
0, 0, width, height);
drawer.drawOes(
listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
drawer.release();

// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
@ -231,8 +229,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
@MediumTest
public static void testDispose() throws InterruptedException {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null);
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.startListening(listener);
// Create EglBase with the SurfaceTexture as target EGLSurface.
@ -267,8 +265,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
*/
@SmallTest
public static void testDisposeImmediately() {
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null);
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
surfaceTextureHelper.dispose();
}

@ -279,8 +277,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
@MediumTest
public static void testStopListening() throws InterruptedException {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null);
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.startListening(listener);
// Create EglBase with the SurfaceTexture as target EGLSurface.
@ -315,8 +313,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
*/
@SmallTest
public static void testStopListeningImmediately() throws InterruptedException {
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null);
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.startListening(listener);
surfaceTextureHelper.stopListening();
@ -329,8 +327,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
*/
@SmallTest
public static void testStopListeningImmediatelyOnHandlerThread() throws InterruptedException {
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null);
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
final MockTextureListener listener = new MockTextureListener();

final CountDownLatch stopListeningBarrier = new CountDownLatch(1);
@ -353,7 +351,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Wait until handler thread is idle to try to catch late startListening() call.
final CountDownLatch barrier = new CountDownLatch(1);
surfaceTextureHelper.getHandler().post(new Runnable() {
@Override public void run() {
@Override
public void run() {
barrier.countDown();
}
});
@ -371,8 +370,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
@MediumTest
public static void testRestartListeningWithNewListener() throws InterruptedException {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null);
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
final MockTextureListener listener1 = new MockTextureListener();
surfaceTextureHelper.startListening(listener1);
// Create EglBase with the SurfaceTexture as target EGLSurface.
@ -467,17 +466,17 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// 368 UUUUUUUU VVVVVVVV
// 384 buffer end
ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3 / 2);
surfaceTextureHelper.textureToYUV(buffer, width, height, width,
listener.oesTextureId, listener.transformMatrix);
surfaceTextureHelper.textureToYUV(
buffer, width, height, width, listener.oesTextureId, listener.transformMatrix);

surfaceTextureHelper.returnTextureFrame();

// Allow off-by-one differences due to different rounding.
while (buffer.position() < width*height) {
while (buffer.position() < width * height) {
assertClose(1, buffer.get() & 0xff, ref_y[i]);
}
while (buffer.hasRemaining()) {
if (buffer.position() % width < width/2)
if (buffer.position() % width < width / 2)
assertClose(1, buffer.get() & 0xff, ref_u[i]);
else
assertClose(1, buffer.get() & 0xff, ref_v[i]);

@ -24,8 +24,7 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
* List with all possible scaling types.
*/
private static final List<RendererCommon.ScalingType> scalingTypes = Arrays.asList(
RendererCommon.ScalingType.SCALE_ASPECT_FIT,
RendererCommon.ScalingType.SCALE_ASPECT_FILL,
RendererCommon.ScalingType.SCALE_ASPECT_FIT, RendererCommon.ScalingType.SCALE_ASPECT_FILL,
RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);

/**
@ -50,22 +49,19 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
/**
* Assert onMeasure() with given parameters will result in expected measured size.
*/
private static void assertMeasuredSize(
SurfaceViewRenderer surfaceViewRenderer, RendererCommon.ScalingType scalingType,
String frameDimensions,
int expectedWidth, int expectedHeight,
int widthSpec, int heightSpec) {
private static void assertMeasuredSize(SurfaceViewRenderer surfaceViewRenderer,
RendererCommon.ScalingType scalingType, String frameDimensions, int expectedWidth,
int expectedHeight, int widthSpec, int heightSpec) {
surfaceViewRenderer.setScalingType(scalingType);
surfaceViewRenderer.onMeasure(widthSpec, heightSpec);
final int measuredWidth = surfaceViewRenderer.getMeasuredWidth();
final int measuredHeight = surfaceViewRenderer.getMeasuredHeight();
if (measuredWidth != expectedWidth || measuredHeight != expectedHeight) {
fail("onMeasure("
+ MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec) + ")"
+ " with scaling type " + scalingType
+ " and frame: " + frameDimensions
+ " expected measured size " + expectedWidth + "x" + expectedHeight
+ ", but was " + measuredWidth + "x" + measuredHeight);
fail("onMeasure(" + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec)
+ ")"
+ " with scaling type " + scalingType + " and frame: " + frameDimensions
+ " expected measured size " + expectedWidth + "x" + expectedHeight + ", but was "
+ measuredWidth + "x" + measuredHeight);
}
}

@ -82,10 +78,9 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
0, 0, zeroMeasureSize, zeroMeasureSize);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
1280, 720,
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
zeroMeasureSize);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
MeasureSpec.makeMeasureSpec(720, measureSpecMode));
}
@ -96,10 +91,9 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
0, 0, zeroMeasureSize, zeroMeasureSize);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
1280, 720,
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
zeroMeasureSize);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
MeasureSpec.makeMeasureSpec(720, measureSpecMode));
}
@ -163,25 +157,23 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
0, 0, zeroMeasureSize, zeroMeasureSize);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0,
zeroMeasureSize, zeroMeasureSize);
}
}

// Test perfect fit.
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
rotatedWidth, rotatedHeight,
MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, rotatedWidth,
rotatedHeight, MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
MeasureSpec.makeMeasureSpec(rotatedHeight, measureSpecMode));
}
}

// Force spec size with different aspect ratio than frame aspect ratio.
for (RendererCommon.ScalingType scalingType : scalingTypes) {
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
720, 1280,
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 720, 1280,
MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY),
MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY));
}
@ -194,8 +186,8 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
for (RendererCommon.ScalingType scalingType : scalingTypes) {
final Point expectedSize =
RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
expectedSize.x, expectedSize.y, widthSpec, heightSpec);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
expectedSize.y, widthSpec, heightSpec);
}
}
{
@ -206,8 +198,8 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
for (RendererCommon.ScalingType scalingType : scalingTypes) {
final Point expectedSize =
RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
expectedSize.x, expectedSize.y, widthSpec, heightSpec);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
expectedSize.y, widthSpec, heightSpec);
}
}
{
@ -215,8 +207,8 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY);
for (RendererCommon.ScalingType scalingType : scalingTypes) {
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
720, 1280, widthSpec, heightSpec);
assertMeasuredSize(
surfaceViewRenderer, scalingType, frameDimensions, 720, 1280, widthSpec, heightSpec);
}
}
}

@ -57,12 +57,10 @@ public class Logging {
TraceLevel(int level) {
this.level = level;
}
};
}

// Keep in sync with webrtc/base/logging.h:LoggingSeverity.
public enum Severity {
LS_SENSITIVE, LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE
};
public enum Severity { LS_SENSITIVE, LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE }

public static void enableLogThreads() {
if (!nativeLibLoaded) {
@ -74,8 +72,8 @@ public class Logging {

public static void enableLogTimeStamps() {
if (!nativeLibLoaded) {
fallbackLogger.log(Level.WARNING,
"Cannot enable log timestamps because native lib not loaded.");
fallbackLogger.log(
Level.WARNING, "Cannot enable log timestamps because native lib not loaded.");
return;
}
nativeEnableLogTimeStamps();
@ -84,8 +82,7 @@ public class Logging {
// Enable tracing to |path| of messages of |levels|.
// On Android, use "logcat:" for |path| to send output there.
// Note: this function controls the output of the WEBRTC_TRACE() macros.
public static synchronized void enableTracing(
String path, EnumSet<TraceLevel> levels) {
public static synchronized void enableTracing(String path, EnumSet<TraceLevel> levels) {
if (!nativeLibLoaded) {
fallbackLogger.log(Level.WARNING, "Cannot enable tracing because native lib not loaded.");
return;
@ -178,8 +175,7 @@ public class Logging {
return sw.toString();
}

private static native void nativeEnableTracing(
String path, int nativeLevels);
private static native void nativeEnableTracing(String path, int nativeLevels);
private static native void nativeEnableLogToDebugOutput(int nativeSeverity);
private static native void nativeEnableLogThreads();
private static native void nativeEnableLogTimeStamps();

@ -42,9 +42,7 @@ public class ThreadUtils {
* Utility interface to be used with executeUninterruptibly() to wait for blocking operations
* to complete without getting interrupted..
*/
public interface BlockingOperation {
void run() throws InterruptedException;
}
public interface BlockingOperation { void run() throws InterruptedException; }

/**
* Utility method to make sure a blocking operation is executed to completion without getting
@ -168,7 +166,8 @@ public class ThreadUtils {
final CaughtException caughtException = new CaughtException();
final CountDownLatch barrier = new CountDownLatch(1);
handler.post(new Runnable() {
@Override public void run() {
@Override
public void run() {
try {
result.value = callable.call();
} catch (Exception e) {
@ -182,9 +181,8 @@ public class ThreadUtils {
// stack trace for the waiting thread as well.
if (caughtException.e != null) {
final RuntimeException runtimeException = new RuntimeException(caughtException.e);
runtimeException.setStackTrace(concatStackTraces(
caughtException.e.getStackTrace(),
runtimeException.getStackTrace()));
runtimeException.setStackTrace(
concatStackTraces(caughtException.e.getStackTrace(), runtimeException.getStackTrace()));
throw runtimeException;
}
return result.value;

@ -84,10 +84,8 @@ public class AppRTCAudioManager {

// The proximity sensor should only be activated when there are exactly two
// available audio devices.
if (audioDevices.size() == 2
&& audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
&& audioDevices.contains(
AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
if (audioDevices.size() == 2 && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
&& audioDevices.contains(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
if (proximitySensor.sensorReportsNearState()) {
// Sensor reports that a "handset is being held up to a person's ear",
// or "something is covering the light sensor".
@ -101,17 +99,14 @@ public class AppRTCAudioManager {
}

/** Construction */
static AppRTCAudioManager create(Context context,
Runnable deviceStateChangeListener) {
static AppRTCAudioManager create(Context context, Runnable deviceStateChangeListener) {
return new AppRTCAudioManager(context, deviceStateChangeListener);
}

private AppRTCAudioManager(Context context,
Runnable deviceStateChangeListener) {
private AppRTCAudioManager(Context context, Runnable deviceStateChangeListener) {
apprtcContext = context;
onStateChangeListener = deviceStateChangeListener;
audioManager = ((AudioManager) context.getSystemService(
Context.AUDIO_SERVICE));
audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));

SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
useSpeakerphone = sharedPreferences.getString(context.getString(R.string.pref_speakerphone_key),
@ -149,8 +144,8 @@ public class AppRTCAudioManager {
savedIsMicrophoneMute = audioManager.isMicrophoneMute();

// Request audio focus before making any device switch.
audioManager.requestAudioFocus(null, AudioManager.STREAM_VOICE_CALL,
AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
audioManager.requestAudioFocus(
null, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);

// Start by setting MODE_IN_COMMUNICATION as default audio mode. It is
// required to be in this mode when playout and/or recording starts for
@ -250,13 +245,11 @@ public class AppRTCAudioManager {
int state = intent.getIntExtra("state", STATE_UNPLUGGED);
int microphone = intent.getIntExtra("microphone", HAS_NO_MIC);
String name = intent.getStringExtra("name");
Log.d(TAG, "BroadcastReceiver.onReceive" + AppRTCUtils.getThreadInfo()
+ ": "
+ "a=" + intent.getAction()
+ ", s=" + (state == STATE_UNPLUGGED ? "unplugged" : "plugged")
+ ", m=" + (microphone == HAS_MIC ? "mic" : "no mic")
+ ", n=" + name
+ ", sb=" + isInitialStickyBroadcast());
Log.d(TAG, "BroadcastReceiver.onReceive" + AppRTCUtils.getThreadInfo() + ": "
+ "a=" + intent.getAction() + ", s="
+ (state == STATE_UNPLUGGED ? "unplugged" : "plugged") + ", m="
+ (microphone == HAS_MIC ? "mic" : "no mic") + ", n=" + name + ", sb="
+ isInitialStickyBroadcast());

boolean hasWiredHeadset = (state == STATE_PLUGGED);
switch (state) {
@ -304,8 +297,7 @@ public class AppRTCAudioManager {

/** Gets the current earpiece state. */
private boolean hasEarpiece() {
return apprtcContext.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_TELEPHONY);
return apprtcContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
}

/**
@ -347,8 +339,8 @@ public class AppRTCAudioManager {

/** Called each time a new audio device has been added or removed. */
private void onAudioManagerChangedState() {
Log.d(TAG, "onAudioManagerChangedState: devices=" + audioDevices
+ ", selected=" + selectedAudioDevice);
Log.d(TAG, "onAudioManagerChangedState: devices=" + audioDevices + ", selected="
+ selectedAudioDevice);

// Enable the proximity sensor if there are two available audio devices
// in the list. Given the current implementation, we know that the choice

@ -20,7 +20,6 @@ import java.util.List;
* AppRTCClient is the interface representing an AppRTC client.
*/
public interface AppRTCClient {

/**
* Struct holding the connection parameters of an AppRTC room.
*/
@ -28,8 +27,7 @@ public interface AppRTCClient {
public final String roomUrl;
public final String roomId;
public final boolean loopback;
public RoomConnectionParameters(
String roomUrl, String roomId, boolean loopback) {
public RoomConnectionParameters(String roomUrl, String roomId, boolean loopback) {
this.roomUrl = roomUrl;
this.roomId = roomId;
this.loopback = loopback;
@ -80,11 +78,9 @@ public interface AppRTCClient {
public final SessionDescription offerSdp;
public final List<IceCandidate> iceCandidates;

public SignalingParameters(
List<PeerConnection.IceServer> iceServers,
boolean initiator, String clientId,
String wssUrl, String wssPostUrl,
SessionDescription offerSdp, List<IceCandidate> iceCandidates) {
public SignalingParameters(List<PeerConnection.IceServer> iceServers, boolean initiator,
String clientId, String wssUrl, String wssPostUrl, SessionDescription offerSdp,
List<IceCandidate> iceCandidates) {
this.iceServers = iceServers;
this.initiator = initiator;
this.clientId = clientId;

@ -45,16 +45,14 @@ public class AppRTCProximitySensor implements SensorEventListener {
private boolean lastStateReportIsNear = false;

/** Construction */
static AppRTCProximitySensor create(Context context,
Runnable sensorStateListener) {
static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
return new AppRTCProximitySensor(context, sensorStateListener);
}

private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
Log.d(TAG, "AppRTCProximitySensor" + AppRTCUtils.getThreadInfo());
onSensorStateListener = sensorStateListener;
sensorManager = ((SensorManager) context.getSystemService(
Context.SENSOR_SERVICE));
sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
}

/**
@ -68,8 +66,7 @@ public class AppRTCProximitySensor implements SensorEventListener {
// Proximity sensor is not supported on this device.
return false;
}
sensorManager.registerListener(
this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
return true;
}

@ -120,8 +117,8 @@ public class AppRTCProximitySensor implements SensorEventListener {
}

Log.d(TAG, "onSensorChanged" + AppRTCUtils.getThreadInfo() + ": "
+ "accuracy=" + event.accuracy
+ ", timestamp=" + event.timestamp + ", distance=" + event.values[0]);
+ "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
+ event.values[0]);
}

/**
@ -168,5 +165,4 @@ public class AppRTCProximitySensor implements SensorEventListener {
}
Log.d(TAG, info.toString());
}

}

@ -42,68 +42,41 @@ import org.webrtc.SurfaceViewRenderer;
* Activity for peer connection call setup, call waiting
* and call view.
*/
public class CallActivity extends Activity
implements AppRTCClient.SignalingEvents,
public class CallActivity extends Activity implements AppRTCClient.SignalingEvents,
PeerConnectionClient.PeerConnectionEvents,
CallFragment.OnCallEvents {

public static final String EXTRA_ROOMID =
"org.appspot.apprtc.ROOMID";
public static final String EXTRA_LOOPBACK =
"org.appspot.apprtc.LOOPBACK";
public static final String EXTRA_VIDEO_CALL =
"org.appspot.apprtc.VIDEO_CALL";
public static final String EXTRA_CAMERA2 =
"org.appspot.apprtc.CAMERA2";
public static final String EXTRA_VIDEO_WIDTH =
"org.appspot.apprtc.VIDEO_WIDTH";
public static final String EXTRA_VIDEO_HEIGHT =
"org.appspot.apprtc.VIDEO_HEIGHT";
public static final String EXTRA_VIDEO_FPS =
"org.appspot.apprtc.VIDEO_FPS";
public static final String EXTRA_ROOMID = "org.appspot.apprtc.ROOMID";
public static final String EXTRA_LOOPBACK = "org.appspot.apprtc.LOOPBACK";
public static final String EXTRA_VIDEO_CALL = "org.appspot.apprtc.VIDEO_CALL";
public static final String EXTRA_CAMERA2 = "org.appspot.apprtc.CAMERA2";
public static final String EXTRA_VIDEO_WIDTH = "org.appspot.apprtc.VIDEO_WIDTH";
public static final String EXTRA_VIDEO_HEIGHT = "org.appspot.apprtc.VIDEO_HEIGHT";
public static final String EXTRA_VIDEO_FPS = "org.appspot.apprtc.VIDEO_FPS";
public static final String EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED =
"org.appsopt.apprtc.VIDEO_CAPTUREQUALITYSLIDER";
public static final String EXTRA_VIDEO_BITRATE =
"org.appspot.apprtc.VIDEO_BITRATE";
public static final String EXTRA_VIDEOCODEC =
"org.appspot.apprtc.VIDEOCODEC";
public static final String EXTRA_HWCODEC_ENABLED =
"org.appspot.apprtc.HWCODEC";
public static final String EXTRA_CAPTURETOTEXTURE_ENABLED =
"org.appspot.apprtc.CAPTURETOTEXTURE";
public static final String EXTRA_AUDIO_BITRATE =
"org.appspot.apprtc.AUDIO_BITRATE";
public static final String EXTRA_AUDIOCODEC =
"org.appspot.apprtc.AUDIOCODEC";
public static final String EXTRA_VIDEO_BITRATE = "org.appspot.apprtc.VIDEO_BITRATE";
public static final String EXTRA_VIDEOCODEC = "org.appspot.apprtc.VIDEOCODEC";
public static final String EXTRA_HWCODEC_ENABLED = "org.appspot.apprtc.HWCODEC";
public static final String EXTRA_CAPTURETOTEXTURE_ENABLED = "org.appspot.apprtc.CAPTURETOTEXTURE";
public static final String EXTRA_AUDIO_BITRATE = "org.appspot.apprtc.AUDIO_BITRATE";
public static final String EXTRA_AUDIOCODEC = "org.appspot.apprtc.AUDIOCODEC";
public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
"org.appspot.apprtc.NOAUDIOPROCESSING";
public static final String EXTRA_AECDUMP_ENABLED =
"org.appspot.apprtc.AECDUMP";
public static final String EXTRA_OPENSLES_ENABLED =
"org.appspot.apprtc.OPENSLES";
public static final String EXTRA_DISABLE_BUILT_IN_AEC =
"org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
public static final String EXTRA_DISABLE_BUILT_IN_AGC =
"org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
public static final String EXTRA_DISABLE_BUILT_IN_NS =
"org.appspot.apprtc.DISABLE_BUILT_IN_NS";
public static final String EXTRA_ENABLE_LEVEL_CONTROL =
"org.appspot.apprtc.ENABLE_LEVEL_CONTROL";
public static final String EXTRA_DISPLAY_HUD =
"org.appspot.apprtc.DISPLAY_HUD";
public static final String EXTRA_AECDUMP_ENABLED = "org.appspot.apprtc.AECDUMP";
public static final String EXTRA_OPENSLES_ENABLED = "org.appspot.apprtc.OPENSLES";
public static final String EXTRA_DISABLE_BUILT_IN_AEC = "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
public static final String EXTRA_DISABLE_BUILT_IN_AGC = "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
public static final String EXTRA_DISABLE_BUILT_IN_NS = "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
public static final String EXTRA_ENABLE_LEVEL_CONTROL = "org.appspot.apprtc.ENABLE_LEVEL_CONTROL";
public static final String EXTRA_DISPLAY_HUD = "org.appspot.apprtc.DISPLAY_HUD";
public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
public static final String EXTRA_CMDLINE =
"org.appspot.apprtc.CMDLINE";
public static final String EXTRA_RUNTIME =
"org.appspot.apprtc.RUNTIME";
public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
private static final String TAG = "CallRTCClient";

// List of mandatory application permissions.
private static final String[] MANDATORY_PERMISSIONS = {
"android.permission.MODIFY_AUDIO_SETTINGS",
"android.permission.RECORD_AUDIO",
"android.permission.INTERNET"
};
private static final String[] MANDATORY_PERMISSIONS = {"android.permission.MODIFY_AUDIO_SETTINGS",
"android.permission.RECORD_AUDIO", "android.permission.INTERNET"};

// Peer connection statistics callback period in ms.
private static final int STAT_CALLBACK_PERIOD = 1000;
@ -152,22 +125,16 @@ public class CallActivity extends Activity
|
||||
@Override
|
||||
public void onCreate(Bundle savedInstanceState) {
|
||||
super.onCreate(savedInstanceState);
|
||||
Thread.setDefaultUncaughtExceptionHandler(
|
||||
new UnhandledExceptionHandler(this));
|
||||
Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));
|
||||
|
||||
// Set window styles for fullscreen-window size. Needs to be done before
|
||||
// adding content.
|
||||
requestWindowFeature(Window.FEATURE_NO_TITLE);
|
||||
getWindow().addFlags(
|
||||
LayoutParams.FLAG_FULLSCREEN
|
||||
| LayoutParams.FLAG_KEEP_SCREEN_ON
|
||||
| LayoutParams.FLAG_DISMISS_KEYGUARD
|
||||
| LayoutParams.FLAG_SHOW_WHEN_LOCKED
|
||||
getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
|
||||
| LayoutParams.FLAG_DISMISS_KEYGUARD | LayoutParams.FLAG_SHOW_WHEN_LOCKED
|
||||
| LayoutParams.FLAG_TURN_SCREEN_ON);
|
||||
getWindow().getDecorView().setSystemUiVisibility(
|
||||
View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
|
||||
| View.SYSTEM_UI_FLAG_FULLSCREEN
|
||||
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
|
||||
getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
|
||||
| View.SYSTEM_UI_FLAG_FULLSCREEN | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
|
||||
setContentView(R.layout.activity_call);
|
||||
|
||||
iceConnected = false;
|
||||
@ -232,23 +199,17 @@ public class CallActivity extends Activity
boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);

boolean useCamera2 = Camera2Enumerator.isSupported(this)
&& intent.getBooleanExtra(EXTRA_CAMERA2, true);
boolean useCamera2 =
Camera2Enumerator.isSupported(this) && intent.getBooleanExtra(EXTRA_CAMERA2, true);

peerConnectionParameters = new PeerConnectionParameters(
intent.getBooleanExtra(EXTRA_VIDEO_CALL, true),
loopback,
tracing,
useCamera2,
intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0),
intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0),
intent.getStringExtra(EXTRA_VIDEOCODEC),
peerConnectionParameters =
new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
tracing, useCamera2, intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0), intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0),
intent.getStringExtra(EXTRA_AUDIOCODEC),
intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
@ -268,8 +229,7 @@ public class CallActivity extends Activity
appRtcClient = new DirectRTCClient(this);
}
// Create connection parameters.
roomConnectionParameters = new RoomConnectionParameters(
roomUri.toString(), roomId, loopback);
roomConnectionParameters = new RoomConnectionParameters(roomUri.toString(), roomId, loopback);

// Create CPU monitor
cpuMonitor = new CpuMonitor(this);
@ -419,8 +379,7 @@ public class CallActivity extends Activity
callStartedTimeMs = System.currentTimeMillis();

// Start room connection.
logAndToast(getString(R.string.connecting_to,
roomConnectionParameters.roomUrl));
logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
appRtcClient.connectToRoom(roomConnectionParameters);

// Create and audio manager that will take care of audio routing,
@ -432,8 +391,7 @@ public class CallActivity extends Activity
public void run() {
onAudioManagerChangedState();
}
}
);
});
// Store existing audio settings and change audio mode to
// MODE_IN_COMMUNICATION for best possible VoIP performance.
Log.d(TAG, "Initializing the audio manager...");
@ -499,13 +457,16 @@ public class CallActivity extends Activity
.setTitle(getText(R.string.channel_error_title))
.setMessage(errorMessage)
.setCancelable(false)
.setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
.setNeutralButton(R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
disconnect();
}
}).create().show();
})
.create()
.show();
}
}

@ -539,8 +500,8 @@ public class CallActivity extends Activity

signalingParameters = params;
logAndToast("Creating peer connection, delay=" + delta + "ms");
peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(),
localRender, remoteRender, signalingParameters);
peerConnectionClient.createPeerConnection(
rootEglBase.getEglBaseContext(), localRender, remoteRender, signalingParameters);

if (signalingParameters.initiator) {
logAndToast("Creating OFFER...");
@ -716,8 +677,7 @@ public class CallActivity extends Activity
}

@Override
public void onPeerConnectionClosed() {
}
public void onPeerConnectionClosed() {}

@Override
public void onPeerConnectionStatsReady(final StatsReport[] reports) {

@ -50,26 +50,18 @@ public class CallFragment extends Fragment {
}

@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
controlView =
inflater.inflate(R.layout.fragment_call, container, false);
public View onCreateView(
LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
controlView = inflater.inflate(R.layout.fragment_call, container, false);

// Create UI controls.
contactView =
(TextView) controlView.findViewById(R.id.contact_name_call);
disconnectButton =
(ImageButton) controlView.findViewById(R.id.button_call_disconnect);
cameraSwitchButton =
(ImageButton) controlView.findViewById(R.id.button_call_switch_camera);
videoScalingButton =
(ImageButton) controlView.findViewById(R.id.button_call_scaling_mode);
toggleMuteButton =
(ImageButton) controlView.findViewById(R.id.button_call_toggle_mic);
captureFormatText =
(TextView) controlView.findViewById(R.id.capture_format_text_call);
captureFormatSlider =
(SeekBar) controlView.findViewById(R.id.capture_format_slider_call);
contactView = (TextView) controlView.findViewById(R.id.contact_name_call);
disconnectButton = (ImageButton) controlView.findViewById(R.id.button_call_disconnect);
cameraSwitchButton = (ImageButton) controlView.findViewById(R.id.button_call_switch_camera);
videoScalingButton = (ImageButton) controlView.findViewById(R.id.button_call_scaling_mode);
toggleMuteButton = (ImageButton) controlView.findViewById(R.id.button_call_toggle_mic);
captureFormatText = (TextView) controlView.findViewById(R.id.capture_format_text_call);
captureFormatSlider = (SeekBar) controlView.findViewById(R.id.capture_format_slider_call);

// Add buttons click events.
disconnectButton.setOnClickListener(new View.OnClickListener() {
@ -90,12 +82,10 @@ public class CallFragment extends Fragment {
@Override
public void onClick(View view) {
if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
videoScalingButton.setBackgroundResource(
R.drawable.ic_action_full_screen);
videoScalingButton.setBackgroundResource(R.drawable.ic_action_full_screen);
scalingType = ScalingType.SCALE_ASPECT_FIT;
} else {
videoScalingButton.setBackgroundResource(
R.drawable.ic_action_return_from_full_screen);
videoScalingButton.setBackgroundResource(R.drawable.ic_action_return_from_full_screen);
scalingType = ScalingType.SCALE_ASPECT_FILL;
}
callEvents.onVideoScalingSwitch(scalingType);
@ -144,5 +134,4 @@ public class CallFragment extends Fragment {
super.onAttach(activity);
callEvents = (OnCallEvents) activity;
}

}
@ -24,13 +24,10 @@ import java.util.List;
* Control capture format based on a seekbar listener.
*/
public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener {
private final List<CaptureFormat> formats = Arrays.asList(
new CaptureFormat(1280, 720, 0, 30000),
new CaptureFormat(960, 540, 0, 30000),
new CaptureFormat(640, 480, 0, 30000),
new CaptureFormat(480, 360, 0, 30000),
new CaptureFormat(320, 240, 0, 30000),
new CaptureFormat(256, 144, 0, 30000));
private final List<CaptureFormat> formats =
Arrays.asList(new CaptureFormat(1280, 720, 0, 30000), new CaptureFormat(960, 540, 0, 30000),
new CaptureFormat(640, 480, 0, 30000), new CaptureFormat(480, 360, 0, 30000),
new CaptureFormat(320, 240, 0, 30000), new CaptureFormat(256, 144, 0, 30000));
// Prioritize framerate below this threshold and resolution above the threshold.
private static final int FRAMERATE_THRESHOLD = 15;
private TextView captureFormatText;
@ -76,8 +73,8 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
// Extract max bandwidth (in millipixels / second).
long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
for (CaptureFormat format : formats) {
maxCaptureBandwidth = Math.max(maxCaptureBandwidth,
(long) format.width * format.height * format.framerate.max);
maxCaptureBandwidth =
Math.max(maxCaptureBandwidth, (long) format.width * format.height * format.framerate.max);
}

// Fraction between 0 and 1.
@ -97,8 +94,7 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
}

@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
public void onStartTrackingTouch(SeekBar seekBar) {}

@Override
public void onStopTrackingTouch(SeekBar seekBar) {
@ -107,8 +103,8 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener

// Return the highest frame rate possible based on bandwidth and format.
private int calculateFramerate(double bandwidth, CaptureFormat format) {
return (int) Math.round(Math.min(format.framerate.max,
(int) Math.round(bandwidth / (format.width * format.height))) / 1000.0);
return (int) Math.round(
Math.min(format.framerate.max, (int) Math.round(bandwidth / (format.width * format.height)))
/ 1000.0);
}
}

@ -118,11 +118,9 @@ public class ConnectActivity extends Activity {
setContentView(R.layout.activity_connect);

roomEditText = (EditText) findViewById(R.id.room_edittext);
roomEditText.setOnEditorActionListener(
new TextView.OnEditorActionListener() {
roomEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
@Override
public boolean onEditorAction(
TextView textView, int i, KeyEvent keyEvent) {
public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) {
if (i == EditorInfo.IME_ACTION_DONE) {
addFavoriteButton.performClick();
return true;
@ -143,12 +141,9 @@ public class ConnectActivity extends Activity {

// If an implicit VIEW intent is launching the app, go directly to that URL.
final Intent intent = getIntent();
if ("android.intent.action.VIEW".equals(intent.getAction())
&& !commandLineRun) {
boolean loopback = intent.getBooleanExtra(
CallActivity.EXTRA_LOOPBACK, false);
int runTimeMs = intent.getIntExtra(
CallActivity.EXTRA_RUNTIME, 0);
if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
String room = sharedPref.getString(keyprefRoom, "");
connectToRoom(room, true, loopback, runTimeMs);
}
@ -230,8 +225,7 @@ public class ConnectActivity extends Activity {
Log.e(TAG, "Failed to load room list: " + e.toString());
}
}
adapter = new ArrayAdapter<String>(
this, android.R.layout.simple_list_item_1, roomList);
adapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, roomList);
roomListView.setAdapter(adapter);
if (adapter.getCount() > 0) {
roomListView.requestFocus();
@ -240,8 +234,7 @@ public class ConnectActivity extends Activity {
}

@Override
protected void onActivityResult(
int requestCode, int resultCode, Intent data) {
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == CONNECTION_REQUEST && commandLineRun) {
Log.d(TAG, "Return: " + resultCode);
setResult(resultCode);
@ -260,71 +253,63 @@ public class ConnectActivity extends Activity {
}

String roomUrl = sharedPref.getString(
keyprefRoomServerUrl,
getString(R.string.pref_room_server_url_default));
keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));

// Video call enabled flag.
boolean videoCallEnabled = sharedPref.getBoolean(keyprefVideoCallEnabled,
Boolean.valueOf(getString(R.string.pref_videocall_default)));
boolean videoCallEnabled = sharedPref.getBoolean(
keyprefVideoCallEnabled, Boolean.valueOf(getString(R.string.pref_videocall_default)));

// Use Camera2 option.
boolean useCamera2 = sharedPref.getBoolean(keyprefCamera2,
Boolean.valueOf(getString(R.string.pref_camera2_default)));
boolean useCamera2 = sharedPref.getBoolean(
keyprefCamera2, Boolean.valueOf(getString(R.string.pref_camera2_default)));

// Get default codecs.
String videoCodec = sharedPref.getString(keyprefVideoCodec,
getString(R.string.pref_videocodec_default));
String audioCodec = sharedPref.getString(keyprefAudioCodec,
getString(R.string.pref_audiocodec_default));
String videoCodec =
sharedPref.getString(keyprefVideoCodec, getString(R.string.pref_videocodec_default));
String audioCodec =
sharedPref.getString(keyprefAudioCodec, getString(R.string.pref_audiocodec_default));

// Check HW codec flag.
boolean hwCodec = sharedPref.getBoolean(keyprefHwCodecAcceleration,
Boolean.valueOf(getString(R.string.pref_hwcodec_default)));
boolean hwCodec = sharedPref.getBoolean(
keyprefHwCodecAcceleration, Boolean.valueOf(getString(R.string.pref_hwcodec_default)));

// Check Capture to texture.
boolean captureToTexture = sharedPref.getBoolean(keyprefCaptureToTexture,
Boolean.valueOf(getString(R.string.pref_capturetotexture_default)));

// Check Disable Audio Processing flag.
boolean noAudioProcessing = sharedPref.getBoolean(
keyprefNoAudioProcessingPipeline,
boolean noAudioProcessing = sharedPref.getBoolean(keyprefNoAudioProcessingPipeline,
Boolean.valueOf(getString(R.string.pref_noaudioprocessing_default)));

// Check Disable Audio Processing flag.
boolean aecDump = sharedPref.getBoolean(
keyprefAecDump,
Boolean.valueOf(getString(R.string.pref_aecdump_default)));
keyprefAecDump, Boolean.valueOf(getString(R.string.pref_aecdump_default)));

// Check OpenSL ES enabled flag.
boolean useOpenSLES = sharedPref.getBoolean(
keyprefOpenSLES,
Boolean.valueOf(getString(R.string.pref_opensles_default)));
keyprefOpenSLES, Boolean.valueOf(getString(R.string.pref_opensles_default)));
// Check Disable built-in AEC flag.
boolean disableBuiltInAEC = sharedPref.getBoolean(
keyprefDisableBuiltInAec,
boolean disableBuiltInAEC = sharedPref.getBoolean(keyprefDisableBuiltInAec,
Boolean.valueOf(getString(R.string.pref_disable_built_in_aec_default)));

// Check Disable built-in AGC flag.
boolean disableBuiltInAGC = sharedPref.getBoolean(
keyprefDisableBuiltInAgc,
boolean disableBuiltInAGC = sharedPref.getBoolean(keyprefDisableBuiltInAgc,
Boolean.valueOf(getString(R.string.pref_disable_built_in_agc_default)));

// Check Disable built-in NS flag.
boolean disableBuiltInNS = sharedPref.getBoolean(
keyprefDisableBuiltInNs,
boolean disableBuiltInNS = sharedPref.getBoolean(keyprefDisableBuiltInNs,
Boolean.valueOf(getString(R.string.pref_disable_built_in_ns_default)));

// Check Enable level control.
boolean enableLevelControl = sharedPref.getBoolean(
keyprefEnableLevelControl,
boolean enableLevelControl = sharedPref.getBoolean(keyprefEnableLevelControl,
Boolean.valueOf(getString(R.string.pref_enable_level_control_key)));

// Get video resolution from settings.
int videoWidth = 0;
int videoHeight = 0;
String resolution = sharedPref.getString(keyprefResolution,
getString(R.string.pref_resolution_default));
String resolution =
sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
String[] dimensions = resolution.split("[ x]+");
if (dimensions.length == 2) {
try {
@ -339,8 +324,7 @@ public class ConnectActivity extends Activity {

// Get camera fps from settings.
int cameraFps = 0;
String fps = sharedPref.getString(keyprefFps,
getString(R.string.pref_fps_default));
String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
String[] fpsValues = fps.split("[ x]+");
if (fpsValues.length == 2) {
try {
@ -356,28 +340,25 @@ public class ConnectActivity extends Activity {

// Get video and audio start bitrate.
int videoStartBitrate = 0;
String bitrateTypeDefault = getString(
R.string.pref_maxvideobitrate_default);
String bitrateType = sharedPref.getString(
keyprefVideoBitrateType, bitrateTypeDefault);
String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
if (!bitrateType.equals(bitrateTypeDefault)) {
String bitrateValue = sharedPref.getString(keyprefVideoBitrateValue,
getString(R.string.pref_maxvideobitratevalue_default));
String bitrateValue = sharedPref.getString(
keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
videoStartBitrate = Integer.parseInt(bitrateValue);
}
int audioStartBitrate = 0;
bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
bitrateType = sharedPref.getString(
keyprefAudioBitrateType, bitrateTypeDefault);
bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
if (!bitrateType.equals(bitrateTypeDefault)) {
String bitrateValue = sharedPref.getString(keyprefAudioBitrateValue,
getString(R.string.pref_startaudiobitratevalue_default));
String bitrateValue = sharedPref.getString(
keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
audioStartBitrate = Integer.parseInt(bitrateValue);
}

// Check statistics display option.
boolean displayHud = sharedPref.getBoolean(keyprefDisplayHud,
Boolean.valueOf(getString(R.string.pref_displayhud_default)));
boolean displayHud = sharedPref.getBoolean(
keyprefDisplayHud, Boolean.valueOf(getString(R.string.pref_displayhud_default)));

boolean tracing = sharedPref.getBoolean(
keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));
@ -395,14 +376,12 @@ public class ConnectActivity extends Activity {
intent.putExtra(CallActivity.EXTRA_VIDEO_WIDTH, videoWidth);
intent.putExtra(CallActivity.EXTRA_VIDEO_HEIGHT, videoHeight);
intent.putExtra(CallActivity.EXTRA_VIDEO_FPS, cameraFps);
intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
captureQualitySlider);
intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, captureQualitySlider);
intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED,
noAudioProcessing);
intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, noAudioProcessing);
intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, disableBuiltInAEC);
@ -429,16 +408,19 @@ public class ConnectActivity extends Activity {
.setTitle(getText(R.string.invalid_url_title))
.setMessage(getString(R.string.invalid_url_text, url))
.setCancelable(false)
.setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
.setNeutralButton(R.string.ok,
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
}).create().show();
})
.create()
.show();
return false;
}

private final AdapterView.OnItemClickListener
roomListClickListener = new AdapterView.OnItemClickListener() {
private final AdapterView.OnItemClickListener roomListClickListener =
new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
String roomId = ((TextView) view).getText().toString();
@ -275,8 +275,8 @@ class CpuMonitor {
int batteryLevel = 0;
int batteryScale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, 100);
if (batteryScale > 0) {
batteryLevel = (int) (
100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
batteryLevel =
(int) (100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
}
return batteryLevel;
}
@ -402,16 +402,20 @@ class CpuMonitor {
private synchronized String getStatString() {
StringBuilder stat = new StringBuilder();
stat.append("CPU User: ")
.append(doubleToPercent(userCpuUsage.getCurrent())).append("/")
.append(doubleToPercent(userCpuUsage.getCurrent()))
.append("/")
.append(doubleToPercent(userCpuUsage.getAverage()))
.append(". System: ")
.append(doubleToPercent(systemCpuUsage.getCurrent())).append("/")
.append(doubleToPercent(systemCpuUsage.getCurrent()))
.append("/")
.append(doubleToPercent(systemCpuUsage.getAverage()))
.append(". Freq: ")
.append(doubleToPercent(frequencyScale.getCurrent())).append("/")
.append(doubleToPercent(frequencyScale.getCurrent()))
.append("/")
.append(doubleToPercent(frequencyScale.getAverage()))
.append(". Total usage: ")
.append(doubleToPercent(totalCpuUsage.getCurrent())).append("/")
.append(doubleToPercent(totalCpuUsage.getCurrent()))
.append("/")
.append(doubleToPercent(totalCpuUsage.getAverage()))
.append(". Cores: ")
.append(actualCpusPresent);
@ -35,8 +35,7 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
private static final int DEFAULT_PORT = 8888;

// Regex pattern used for checking if room id looks like an IP.
static final Pattern IP_PATTERN = Pattern.compile(
"("
static final Pattern IP_PATTERN = Pattern.compile("("
// IPv4
+ "((\\d+\\.){3}\\d+)|"
// IPv6
@ -50,17 +49,14 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
+ "localhost"
+ ")"
// Optional port number
+ "(:(\\d+))?"
);
+ "(:(\\d+))?");

private final ExecutorService executor;
private final SignalingEvents events;
private TCPChannelClient tcpClient;
private RoomConnectionParameters connectionParameters;

private enum ConnectionState {
NEW, CONNECTED, CLOSED, ERROR
};
private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }

// All alterations of the room state should be done from inside the looper thread.
private ConnectionState roomState;
@ -265,13 +261,11 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
events.onRemoteIceCandidatesRemoved(candidates);
} else if (type.equals("answer")) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type),
json.getString("sdp"));
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
events.onRemoteDescription(sdp);
} else if (type.equals("offer")) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type),
json.getString("sdp"));
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));

SignalingParameters parameters = new SignalingParameters(
// Ice servers are not needed for direct connections.
@ -347,8 +341,7 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne

// Converts a JSON candidate to a Java object.
private static IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
return new IceCandidate(json.getString("id"),
json.getInt("label"),
json.getString("candidate"));
return new IceCandidate(
json.getString("id"), json.getInt("label"), json.getString("candidate"));
}
}

@ -41,8 +41,8 @@ public class HudFragment extends Fragment {
private CpuMonitor cpuMonitor;

@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
public View onCreateView(
LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
controlView = inflater.inflate(R.layout.fragment_hud, container, false);

// Create UI controls.
@ -57,8 +57,8 @@ public class HudFragment extends Fragment {
@Override
public void onClick(View view) {
if (displayHud) {
int visibility = (hudViewBwe.getVisibility() == View.VISIBLE)
? View.INVISIBLE : View.VISIBLE;
int visibility =
(hudViewBwe.getVisibility() == View.VISIBLE) ? View.INVISIBLE : View.VISIBLE;
hudViewsSetProperties(visibility);
}
}
@ -126,8 +126,7 @@ public class HudFragment extends Fragment {
String actualBitrate = null;

for (StatsReport report : reports) {
if (report.type.equals("ssrc") && report.id.contains("ssrc")
&& report.id.contains("send")) {
if (report.type.equals("ssrc") && report.id.contains("ssrc") && report.id.contains("send")) {
// Send video statistics.
Map<String, String> reportMap = getReportMap(report);
String trackId = reportMap.get("googTrackId");
@ -195,9 +194,11 @@ public class HudFragment extends Fragment {

if (cpuMonitor != null) {
encoderStat.append("CPU%: ")
.append(cpuMonitor.getCpuUsageCurrent()).append("/")
.append(cpuMonitor.getCpuUsageCurrent())
.append("/")
.append(cpuMonitor.getCpuUsageAverage())
.append(". Freq: ").append(cpuMonitor.getFrequencyScaleAverage());
.append(". Freq: ")
.append(cpuMonitor.getFrequencyScaleAverage());
}
encoderStatView.setText(encoderStat.toString());
}

@ -74,11 +74,10 @@ public class PeerConnectionClient {
private static final String VIDEO_CODEC_H264 = "H264";
private static final String AUDIO_CODEC_OPUS = "opus";
private static final String AUDIO_CODEC_ISAC = "ISAC";
private static final String VIDEO_CODEC_PARAM_START_BITRATE =
"x-google-start-bitrate";
private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT= "googAutoGainControl";
private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
private static final String AUDIO_LEVEL_CONTROL_CONSTRAINT = "levelControl";
@ -162,14 +161,12 @@ public class PeerConnectionClient {
public final boolean disableBuiltInNS;
public final boolean enableLevelControl;

public PeerConnectionParameters(
boolean videoCallEnabled, boolean loopback, boolean tracing, boolean useCamera2,
int videoWidth, int videoHeight, int videoFps,
int videoMaxBitrate, String videoCodec, boolean videoCodecHwAcceleration,
boolean captureToTexture, int audioStartBitrate, String audioCodec,
boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES,
boolean disableBuiltInAEC, boolean disableBuiltInAGC, boolean disableBuiltInNS,
boolean enableLevelControl) {
public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
boolean useCamera2, int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate,
String videoCodec, boolean videoCodecHwAcceleration, boolean captureToTexture,
int audioStartBitrate, String audioCodec, boolean noAudioProcessing, boolean aecDump,
boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
boolean disableBuiltInNS, boolean enableLevelControl) {
this.videoCallEnabled = videoCallEnabled;
this.useCamera2 = useCamera2;
this.loopback = loopback;
@ -255,10 +252,8 @@ public class PeerConnectionClient {
this.options = options;
}

public void createPeerConnectionFactory(
final Context context,
final PeerConnectionParameters peerConnectionParameters,
final PeerConnectionEvents events) {
public void createPeerConnectionFactory(final Context context,
final PeerConnectionParameters peerConnectionParameters, final PeerConnectionEvents events) {
this.peerConnectionParameters = peerConnectionParameters;
this.events = events;
videoCallEnabled = peerConnectionParameters.videoCallEnabled;
@ -289,10 +284,8 @@ public class PeerConnectionClient {
});
}

public void createPeerConnection(
final EglBase.Context renderEGLContext,
final VideoRenderer.Callbacks localRender,
final VideoRenderer.Callbacks remoteRender,
public void createPeerConnection(final EglBase.Context renderEGLContext,
final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
final SignalingParameters signalingParameters) {
if (peerConnectionParameters == null) {
Log.e(TAG, "Creating peer connection without initializing factory.");
@ -335,8 +328,8 @@ public class PeerConnectionClient {
Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
+ "webrtc-trace.txt");
}
Log.d(TAG, "Create peer connection factory. Use video: " +
peerConnectionParameters.videoCallEnabled);
Log.d(TAG,
"Create peer connection factory. Use video: " + peerConnectionParameters.videoCallEnabled);
isError = false;

// Initialize field trials.
@ -391,8 +384,8 @@ public class PeerConnectionClient {
}

// Create peer connection factory.
if (!PeerConnectionFactory.initializeAndroidGlobals(context, true, true,
peerConnectionParameters.videoCodecHwAcceleration)) {
if (!PeerConnectionFactory.initializeAndroidGlobals(
context, true, true, peerConnectionParameters.videoCodecHwAcceleration)) {
events.onPeerConnectionError("Failed to initializeAndroidGlobals");
}
if (options != null) {
@ -448,30 +441,30 @@ public class PeerConnectionClient {
// added for audio performance measurements
if (peerConnectionParameters.noAudioProcessing) {
Log.d(TAG, "Disabling audio processing");
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_NOISE_SUPPRESSION_CONSTRAINT , "false"));
audioConstraints.mandatory.add(
new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
audioConstraints.mandatory.add(
new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
audioConstraints.mandatory.add(
new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
audioConstraints.mandatory.add(
new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
}
if (peerConnectionParameters.enableLevelControl) {
Log.d(TAG, "Enabling level control.");
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
audioConstraints.mandatory.add(
new MediaConstraints.KeyValuePair(AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
}
// Create SDP constraints.
sdpMediaConstraints = new MediaConstraints();
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveAudio", "true"));
sdpMediaConstraints.mandatory.add(
new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
if (videoCallEnabled || peerConnectionParameters.loopback) {
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveVideo", "true"));
sdpMediaConstraints.mandatory.add(
new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
} else {
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveVideo", "false"));
sdpMediaConstraints.mandatory.add(
new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
}
}
@ -531,15 +524,12 @@ public class PeerConnectionClient {
// Use ECDSA encryption.
rtcConfig.keyType = PeerConnection.KeyType.ECDSA;

peerConnection = factory.createPeerConnection(
rtcConfig, pcConstraints, pcObserver);
peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
isInitiator = false;

// Set default WebRTC tracing and INFO libjingle logging.
// NOTE: this _must_ happen while |factory| is alive!
Logging.enableTracing(
"logcat:",
EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);

mediaStream = factory.createLocalMediaStream("ARDAMS");
@ -572,15 +562,13 @@ public class PeerConnectionClient {

if (peerConnectionParameters.aecDump) {
try {
aecDumpFileDescriptor = ParcelFileDescriptor.open(
new File(Environment.getExternalStorageDirectory().getPath()
+ File.separator
+ "Download/audio.aecdump"),
ParcelFileDescriptor.MODE_READ_WRITE |
ParcelFileDescriptor.MODE_CREATE |
ParcelFileDescriptor.MODE_TRUNCATE);
aecDumpFileDescriptor =
ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
+ File.separator + "Download/audio.aecdump"),
ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
| ParcelFileDescriptor.MODE_TRUNCATE);
factory.startAecDump(aecDumpFileDescriptor.getFd(), -1);
} catch(IOException e) {
} catch (IOException e) {
Log.e(TAG, "Can not open aecdump file", e);
}
}
@ -607,7 +595,7 @@ public class PeerConnectionClient {
if (videoCapturer != null) {
try {
videoCapturer.stopCapture();
} catch(InterruptedException e) {
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
videoCapturer.dispose();
@ -773,12 +761,11 @@ public class PeerConnectionClient {
sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
}
if (peerConnectionParameters.audioStartBitrate > 0) {
sdpDescription = setStartBitrate(AUDIO_CODEC_OPUS, false,
sdpDescription, peerConnectionParameters.audioStartBitrate);
sdpDescription = setStartBitrate(
AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate);
}
Log.d(TAG, "Set remote SDP.");
SessionDescription sdpRemote = new SessionDescription(
sdp.type, sdpDescription);
SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription);
peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
}
});
@ -792,7 +779,8 @@ public class PeerConnectionClient {
Log.d(TAG, "Stop video source.");
try {
videoCapturer.stopCapture();
} catch (InterruptedException e) {}
} catch (InterruptedException e) {
}
videoCapturerStopped = true;
}
}
@ -833,9 +821,7 @@ public class PeerConnectionClient {

for (RtpParameters.Encoding encoding : parameters.encodings) {
// Null value means no limit.
encoding.maxBitrateBps = maxBitrateKbps == null
? null
: maxBitrateKbps * BPS_IN_KBPS;
encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS;
}
if (!localVideoSender.setParameters(parameters)) {
Log.e(TAG, "RtpSender.setParameters failed.");
@ -887,8 +873,8 @@ public class PeerConnectionClient {
}
}

private static String setStartBitrate(String codec, boolean isVideoCodec,
String sdpDescription, int bitrateKbps) {
private static String setStartBitrate(
String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) {
String[] lines = sdpDescription.split("\r\n");
int rtpmapLineIndex = -1;
boolean sdpFormatUpdated = false;
@ -909,8 +895,7 @@ public class PeerConnectionClient {
Log.w(TAG, "No rtpmap for " + codec + " codec");
return sdpDescription;
}
Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap
+ " at " + lines[rtpmapLineIndex]);
Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]);

// Check if a=fmtp string already exist in remote SDP for this codec and
// update it with new bitrate parameter.
@ -921,11 +906,9 @@ public class PeerConnectionClient {
if (codecMatcher.matches()) {
Log.d(TAG, "Found " + codec + " " + lines[i]);
if (isVideoCodec) {
lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE
+ "=" + bitrateKbps;
lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
} else {
lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE
+ "=" + (bitrateKbps * 1000);
lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
}
Log.d(TAG, "Update remote SDP line: " + lines[i]);
sdpFormatUpdated = true;
@ -940,22 +923,20 @@ public class PeerConnectionClient {
if (!sdpFormatUpdated && i == rtpmapLineIndex) {
String bitrateSet;
if (isVideoCodec) {
bitrateSet = "a=fmtp:" + codecRtpMap + " "
+ VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
bitrateSet =
"a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
} else {
bitrateSet = "a=fmtp:" + codecRtpMap + " "
+ AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "="
+ (bitrateKbps * 1000);
}
Log.d(TAG, "Add remote SDP line: " + bitrateSet);
newSdpDescription.append(bitrateSet).append("\r\n");
}

}
return newSdpDescription.toString();
}

private static String preferCodec(
String sdpDescription, String codec, boolean isAudio) {
private static String preferCodec(String sdpDescription, String codec, boolean isAudio) {
String[] lines = sdpDescription.split("\r\n");
int mLineIndex = -1;
String codecRtpMap = null;
@ -966,8 +947,7 @@ public class PeerConnectionClient {
if (isAudio) {
mediaDescription = "m=audio ";
}
for (int i = 0; (i < lines.length)
&& (mLineIndex == -1 || codecRtpMap == null); i++) {
for (int i = 0; (i < lines.length) && (mLineIndex == -1 || codecRtpMap == null); i++) {
if (lines[i].startsWith(mediaDescription)) {
mLineIndex = i;
continue;
@ -985,8 +965,7 @@ public class PeerConnectionClient {
Log.w(TAG, "No rtpmap for " + codec);
return sdpDescription;
}
Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + ", prefer at "
+ lines[mLineIndex]);
Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + ", prefer at " + lines[mLineIndex]);
String[] origMLineParts = lines[mLineIndex].split(" ");
if (origMLineParts.length > 3) {
StringBuilder newMLine = new StringBuilder();
@ -1025,8 +1004,8 @@ public class PeerConnectionClient {

private void switchCameraInternal() {
if (!videoCallEnabled || numberOfCameras < 2 || isError || videoCapturer == null) {
Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : "
+ isError + ". Number of cameras: " + numberOfCameras);
Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError
+ ". Number of cameras: " + numberOfCameras);
return; // No video is sent or only one camera is available or error happened.
}
Log.d(TAG, "Switch camera");
@ -1053,8 +1032,8 @@ public class PeerConnectionClient {

private void changeCaptureFormatInternal(int width, int height, int framerate) {
if (!videoCallEnabled || isError || videoCapturer == null) {
Log.e(TAG, "Failed to change capture format. Video: " + videoCallEnabled + ". Error : "
+ isError);
Log.e(TAG,
"Failed to change capture format. Video: " + videoCallEnabled + ". Error : " + isError);
return;
}
Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
@ -1064,7 +1043,7 @@ public class PeerConnectionClient {
// Implementation detail: observe ICE & stream changes and react accordingly.
private class PCObserver implements PeerConnection.Observer {
@Override
public void onIceCandidate(final IceCandidate candidate){
public void onIceCandidate(final IceCandidate candidate) {
executor.execute(new Runnable() {
@Override
public void run() {
@ -1084,14 +1063,12 @@ public class PeerConnectionClient {
}

@Override
public void onSignalingChange(
PeerConnection.SignalingState newState) {
public void onSignalingChange(PeerConnection.SignalingState newState) {
Log.d(TAG, "SignalingState: " + newState);
}

@Override
public void onIceConnectionChange(
final PeerConnection.IceConnectionState newState) {
public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) {
executor.execute(new Runnable() {
@Override
public void run() {
@ -1108,8 +1085,7 @@ public class PeerConnectionClient {
}

@Override
public void onIceGatheringChange(
PeerConnection.IceGatheringState newState) {
public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
Log.d(TAG, "IceGatheringState: " + newState);
}

@ -1119,7 +1095,7 @@ public class PeerConnectionClient {
}

@Override
public void onAddStream(final MediaStream stream){
public void onAddStream(final MediaStream stream) {
executor.execute(new Runnable() {
@Override
public void run() {
@ -1140,7 +1116,7 @@ public class PeerConnectionClient {
}

@Override
public void onRemoveStream(final MediaStream stream){
public void onRemoveStream(final MediaStream stream) {
executor.execute(new Runnable() {
@Override
public void run() {
@ -1151,8 +1127,7 @@ public class PeerConnectionClient {

@Override
public void onDataChannel(final DataChannel dc) {
reportError("AppRTC doesn't use data channels, but got: " + dc.label()
+ " anyway!");
reportError("AppRTC doesn't use data channels, but got: " + dc.label() + " anyway!");
}

@Override
@ -1178,8 +1153,7 @@ public class PeerConnectionClient {
if (videoCallEnabled) {
sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
}
final SessionDescription sdp = new SessionDescription(
origSdp.type, sdpDescription);
final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription);
localSdp = sdp;
executor.execute(new Runnable() {
@Override
@ -54,8 +54,7 @@ public class PercentFrameLayout extends ViewGroup {
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = getDefaultSize(Integer.MAX_VALUE, widthMeasureSpec);
final int height = getDefaultSize(Integer.MAX_VALUE, heightMeasureSpec);
setMeasuredDimension(
MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
setMeasuredDimension(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));

final int childWidthMeasureSpec =

@ -58,8 +58,8 @@ public class RoomParametersFetcher {
void onSignalingParametersError(final String description);
}

public RoomParametersFetcher(String roomUrl, String roomMessage,
final RoomParametersFetcherEvents events) {
public RoomParametersFetcher(
String roomUrl, String roomMessage, final RoomParametersFetcherEvents events) {
this.roomUrl = roomUrl;
this.roomMessage = roomMessage;
this.events = events;
@ -67,9 +67,8 @@ public class RoomParametersFetcher {

public void makeRequest() {
Log.d(TAG, "Connecting to room: " + roomUrl);
httpConnection = new AsyncHttpURLConnection(
"POST", roomUrl, roomMessage,
new AsyncHttpEvents() {
httpConnection =
new AsyncHttpURLConnection("POST", roomUrl, roomMessage, new AsyncHttpEvents() {
@Override
public void onHttpError(String errorMessage) {
Log.e(TAG, "Room connection error: " + errorMessage);
@ -114,13 +113,10 @@ public class RoomParametersFetcher {
Log.d(TAG, "GAE->C #" + i + " : " + messageString);
if (messageType.equals("offer")) {
offerSdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(messageType),
message.getString("sdp"));
SessionDescription.Type.fromCanonicalForm(messageType), message.getString("sdp"));
} else if (messageType.equals("candidate")) {
IceCandidate candidate = new IceCandidate(
message.getString("id"),
message.getInt("label"),
message.getString("candidate"));
message.getString("id"), message.getInt("label"), message.getString("candidate"));
iceCandidates.add(candidate);
} else {
Log.e(TAG, "Unknown message: " + messageString);
@ -153,13 +149,10 @@ public class RoomParametersFetcher {
}

SignalingParameters params = new SignalingParameters(
iceServers, initiator,
clientId, wssUrl, wssPostUrl,
offerSdp, iceCandidates);
iceServers, initiator, clientId, wssUrl, wssPostUrl, offerSdp, iceCandidates);
events.onSignalingParametersReady(params);
} catch (JSONException e) {
events.onSignalingParametersError(
"Room JSON parsing error: " + e.toString());
events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
} catch (IOException e) {
events.onSignalingParametersError("Room IO error: " + e.toString());
}
@ -169,19 +162,17 @@ public class RoomParametersFetcher {
// off the main thread!
private LinkedList<PeerConnection.IceServer> requestTurnServers(String url)
throws IOException, JSONException {
LinkedList<PeerConnection.IceServer> turnServers =
new LinkedList<PeerConnection.IceServer>();
LinkedList<PeerConnection.IceServer> turnServers = new LinkedList<PeerConnection.IceServer>();
Log.d(TAG, "Request TURN from: " + url);
HttpURLConnection connection =
(HttpURLConnection) new URL(url).openConnection();
HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
connection.setDoOutput(true);
connection.setRequestProperty("REFERER", "https://appr.tc");
connection.setConnectTimeout(TURN_HTTP_TIMEOUT_MS);
connection.setReadTimeout(TURN_HTTP_TIMEOUT_MS);
int responseCode = connection.getResponseCode();
if (responseCode != 200) {
throw new IOException("Non-200 response when requesting TURN server from "
+ url + " : " + connection.getHeaderField(null));
throw new IOException("Non-200 response when requesting TURN server from " + url + " : "
+ connection.getHeaderField(null));
}
InputStream responseStream = connection.getInputStream();
String response = drainStream(responseStream);
@ -192,14 +183,11 @@ public class RoomParametersFetcher {
for (int i = 0; i < iceServers.length(); ++i) {
JSONObject server = iceServers.getJSONObject(i);
JSONArray turnUrls = server.getJSONArray("urls");
String username =
server.has("username") ? server.getString("username") : "";
String credential =
server.has("credential") ? server.getString("credential") : "";
String username = server.has("username") ? server.getString("username") : "";
String credential = server.has("credential") ? server.getString("credential") : "";
for (int j = 0; j < turnUrls.length(); j++) {
String turnUrl = turnUrls.getString(j);
turnServers.add(new PeerConnection.IceServer(turnUrl, username,
credential));
turnServers.add(new PeerConnection.IceServer(turnUrl, username, credential));
}
}
return turnServers;
@ -207,17 +195,15 @@ public class RoomParametersFetcher {

// Return the list of ICE servers described by a WebRTCPeerConnection
// configuration string.
private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(
String pcConfig) throws JSONException {
private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
throws JSONException {
JSONObject json = new JSONObject(pcConfig);
JSONArray servers = json.getJSONArray("iceServers");
LinkedList<PeerConnection.IceServer> ret =
new LinkedList<PeerConnection.IceServer>();
LinkedList<PeerConnection.IceServer> ret = new LinkedList<PeerConnection.IceServer>();
for (int i = 0; i < servers.length(); ++i) {
JSONObject server = servers.getJSONObject(i);
String url = server.getString("urls");
String credential =
server.has("credential") ? server.getString("credential") : "";
String credential = server.has("credential") ? server.getString("credential") : "";
ret.add(new PeerConnection.IceServer(url, "", credential));
}
return ret;
@ -228,5 +214,4 @@ public class RoomParametersFetcher {
Scanner s = new Scanner(in).useDelimiter("\\A");
return s.hasNext() ? s.next() : "";
}

}

@ -23,8 +23,7 @@ import org.webrtc.voiceengine.WebRtcAudioUtils;
/**
* Settings activity for AppRTC.
*/
public class SettingsActivity extends Activity
implements OnSharedPreferenceChangeListener{
public class SettingsActivity extends Activity implements OnSharedPreferenceChangeListener {
private SettingsFragment settingsFragment;
private String keyprefVideoCall;
private String keyprefCamera2;
@ -85,7 +84,8 @@ public class SettingsActivity extends Activity

// Display the fragment as the main content.
settingsFragment = new SettingsFragment();
getFragmentManager().beginTransaction()
getFragmentManager()
.beginTransaction()
.replace(android.R.id.content, settingsFragment)
.commit();
}
@ -127,8 +127,7 @@ public class SettingsActivity extends Activity
updateSummaryB(sharedPreferences, keyPrefTracing);

if (!Camera2Enumerator.isSupported(this)) {
Preference camera2Preference =
settingsFragment.findPreference(keyprefCamera2);
Preference camera2Preference = settingsFragment.findPreference(keyprefCamera2);

camera2Preference.setSummary(getString(R.string.pref_camera2_not_supported));
camera2Preference.setEnabled(false);
@ -173,8 +172,8 @@ public class SettingsActivity extends Activity
}

@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences,
String key) {
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
// clang-format off
if (key.equals(keyprefResolution)
|| key.equals(keyprefFps)
|| key.equals(keyprefMaxVideoBitrateType)
@ -204,6 +203,7 @@ public class SettingsActivity extends Activity
} else if (key.equals(keyprefSpeakerphone)) {
updateSummaryList(sharedPreferences, key);
}
// clang-format on
if (key.equals(keyprefMaxVideoBitrateType)) {
setVideoBitrateEnable(sharedPreferences);
}
@ -218,8 +218,7 @@ public class SettingsActivity extends Activity
updatedPref.setSummary(sharedPreferences.getString(key, ""));
}

private void updateSummaryBitrate(
SharedPreferences sharedPreferences, String key) {
private void updateSummaryBitrate(SharedPreferences sharedPreferences, String key) {
Preference updatedPref = settingsFragment.findPreference(key);
updatedPref.setSummary(sharedPreferences.getString(key, "") + " kbps");
}
@ -240,8 +239,8 @@ public class SettingsActivity extends Activity
Preference bitratePreferenceValue =
settingsFragment.findPreference(keyprefMaxVideoBitrateValue);
String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
String bitrateType = sharedPreferences.getString(
keyprefMaxVideoBitrateType, bitrateTypeDefault);
String bitrateType =
sharedPreferences.getString(keyprefMaxVideoBitrateType, bitrateTypeDefault);
if (bitrateType.equals(bitrateTypeDefault)) {
bitratePreferenceValue.setEnabled(false);
} else {
@ -253,8 +252,8 @@ public class SettingsActivity extends Activity
Preference bitratePreferenceValue =
settingsFragment.findPreference(keyprefStartAudioBitrateValue);
String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
String bitrateType = sharedPreferences.getString(
keyprefStartAudioBitrateType, bitrateTypeDefault);
String bitrateType =
sharedPreferences.getString(keyprefStartAudioBitrateType, bitrateTypeDefault);
if (bitrateType.equals(bitrateTypeDefault)) {
bitratePreferenceValue.setEnabled(false);
} else {

@ -17,7 +17,6 @@ import android.preference.PreferenceFragment;
* Settings fragment for AppRTC.
*/
public class SettingsFragment extends PreferenceFragment {

@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
@ -116,7 +116,6 @@ public class TCPChannelClient {
});
}

/**
* Base class for server and client sockets. Contains a listening thread that will call
* eventListener.onTCPMessage on new messages.

@ -29,8 +29,7 @@ import java.io.StringWriter;
* Thread.setDefaultUncaughtExceptionHandler() rather than
* Thread.setUncaughtExceptionHandler(), to apply to background threads as well.
*/
public class UnhandledExceptionHandler
implements Thread.UncaughtExceptionHandler {
public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandler {
private static final String TAG = "AppRTCMobileActivity";
private final Activity activity;

@ -40,7 +39,8 @@ public class UnhandledExceptionHandler

public void uncaughtException(Thread unusedThread, final Throwable e) {
activity.runOnUiThread(new Runnable() {
@Override public void run() {
@Override
public void run() {
String title = "Fatal error: " + getTopLevelCauseMessage(e);
String msg = getRecursiveStackTrace(e);
TextView errorView = new TextView(activity);
@ -49,20 +49,18 @@ public class UnhandledExceptionHandler
ScrollView scrollingContainer = new ScrollView(activity);
scrollingContainer.addView(errorView);
Log.e(TAG, title + "\n\n" + msg);
DialogInterface.OnClickListener listener =
new DialogInterface.OnClickListener() {
@Override public void onClick(
DialogInterface dialog, int which) {
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
System.exit(1);
}
};
AlertDialog.Builder builder =
new AlertDialog.Builder(activity);
builder
.setTitle(title)
AlertDialog.Builder builder = new AlertDialog.Builder(activity);
builder.setTitle(title)
.setView(scrollingContainer)
.setPositiveButton("Exit", listener).show();
.setPositiveButton("Exit", listener)
.show();
}
});
}
@ -56,9 +56,7 @@ public class WebSocketChannelClient {
/**
* Possible WebSocket connection states.
*/
public enum WebSocketConnectionState {
NEW, CONNECTED, REGISTERED, CLOSED, ERROR
};
public enum WebSocketConnectionState { NEW, CONNECTED, REGISTERED, CLOSED, ERROR }

/**
* Callback interface for messages delivered on WebSocket.
@ -179,8 +177,7 @@ public class WebSocketChannelClient {
sendWSSMessage("DELETE", "");
}
// Close WebSocket in CONNECTED or ERROR states only.
if (state == WebSocketConnectionState.CONNECTED
|| state == WebSocketConnectionState.ERROR) {
if (state == WebSocketConnectionState.CONNECTED || state == WebSocketConnectionState.ERROR) {
ws.disconnect();
state = WebSocketConnectionState.CLOSED;

@ -219,16 +216,15 @@ public class WebSocketChannelClient {
private void sendWSSMessage(final String method, final String message) {
String postUrl = postServerUrl + "/" + roomID + "/" + clientID;
Log.d(TAG, "WS " + method + " : " + postUrl + " : " + message);
AsyncHttpURLConnection httpConnection = new AsyncHttpURLConnection(
method, postUrl, message, new AsyncHttpEvents() {
AsyncHttpURLConnection httpConnection =
new AsyncHttpURLConnection(method, postUrl, message, new AsyncHttpEvents() {
@Override
public void onHttpError(String errorMessage) {
reportError("WS " + method + " error: " + errorMessage);
}

@Override
public void onHttpComplete(String response) {
}
public void onHttpComplete(String response) {}
});
httpConnection.send();
}
@ -237,8 +233,7 @@ public class WebSocketChannelClient {
// called on a looper thread.
private void checkIfCalledOnValidThread() {
if (Thread.currentThread() != handler.getLooper().getThread()) {
throw new IllegalStateException(
"WebSocket method is not called on valid thread");
throw new IllegalStateException("WebSocket method is not called on valid thread");
}
}

@ -260,8 +255,8 @@ public class WebSocketChannelClient {

@Override
public void onClose(WebSocketCloseNotification code, String reason) {
Log.d(TAG, "WebSocket connection closed. Code: " + code
+ ". Reason: " + reason + ". State: " + state);
Log.d(TAG, "WebSocket connection closed. Code: " + code + ". Reason: " + reason + ". State: "
+ state);
synchronized (closeEventLock) {
closeEvent = true;
closeEventLock.notify();
@ -293,12 +288,9 @@ public class WebSocketChannelClient {
}

@Override
public void onRawTextMessage(byte[] payload) {
}
public void onRawTextMessage(byte[] payload) {}

@Override
public void onBinaryMessage(byte[] payload) {
public void onBinaryMessage(byte[] payload) {}
}
}

}

@ -36,19 +36,16 @@ import org.webrtc.SessionDescription;
* Messages to other party (with local Ice candidates and answer SDP) can
* be sent after WebSocket connection is established.
*/
public class WebSocketRTCClient implements AppRTCClient,
WebSocketChannelEvents {
public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents {
private static final String TAG = "WSRTCClient";
private static final String ROOM_JOIN = "join";
private static final String ROOM_MESSAGE = "message";
private static final String ROOM_LEAVE = "leave";

private enum ConnectionState {
NEW, CONNECTED, CLOSED, ERROR
};
private enum MessageType {
MESSAGE, LEAVE
};
private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }

private enum MessageType { MESSAGE, LEAVE }

private final Handler handler;
private boolean initiator;
private SignalingEvents events;
@ -101,8 +98,7 @@ public class WebSocketRTCClient implements AppRTCClient,

RoomParametersFetcherEvents callbacks = new RoomParametersFetcherEvents() {
@Override
public void onSignalingParametersReady(
final SignalingParameters params) {
public void onSignalingParametersReady(final SignalingParameters params) {
WebSocketRTCClient.this.handler.post(new Runnable() {
@Override
public void run() {
@ -134,37 +130,32 @@ public class WebSocketRTCClient implements AppRTCClient,
}

// Helper functions to get connection, post message and leave message URLs
private String getConnectionUrl(
RoomConnectionParameters connectionParameters) {
return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/"
+ connectionParameters.roomId;
private String getConnectionUrl(RoomConnectionParameters connectionParameters) {
return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/" + connectionParameters.roomId;
}

private String getMessageUrl(RoomConnectionParameters connectionParameters,
SignalingParameters signalingParameters) {
return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/"
+ connectionParameters.roomId + "/" + signalingParameters.clientId;
private String getMessageUrl(
RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/" + connectionParameters.roomId
+ "/" + signalingParameters.clientId;
}

private String getLeaveUrl(RoomConnectionParameters connectionParameters,
SignalingParameters signalingParameters) {
return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/"
+ connectionParameters.roomId + "/" + signalingParameters.clientId;
private String getLeaveUrl(
RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/" + connectionParameters.roomId + "/"
+ signalingParameters.clientId;
}

// Callback issued when room parameters are extracted. Runs on local
// looper thread.
private void signalingParametersReady(
final SignalingParameters signalingParameters) {
private void signalingParametersReady(final SignalingParameters signalingParameters) {
Log.d(TAG, "Room connection completed.");
if (connectionParameters.loopback
&& (!signalingParameters.initiator
|| signalingParameters.offerSdp != null)) {
&& (!signalingParameters.initiator || signalingParameters.offerSdp != null)) {
reportError("Loopback room is busy.");
return;
}
if (!connectionParameters.loopback
&& !signalingParameters.initiator
if (!connectionParameters.loopback && !signalingParameters.initiator
&& signalingParameters.offerSdp == null) {
Log.w(TAG, "No offer SDP in room response.");
}
@ -200,8 +191,7 @@ public class WebSocketRTCClient implements AppRTCClient,
if (connectionParameters.loopback) {
// In loopback mode rename this offer to answer and route it back.
SessionDescription sdpAnswer = new SessionDescription(
SessionDescription.Type.fromCanonicalForm("answer"),
sdp.description);
SessionDescription.Type.fromCanonicalForm("answer"), sdp.description);
events.onRemoteDescription(sdpAnswer);
}
}
@ -308,15 +298,14 @@ public class WebSocketRTCClient implements AppRTCClient,
} else if (type.equals("remove-candidates")) {
JSONArray candidateArray = json.getJSONArray("candidates");
IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
for (int i =0; i < candidateArray.length(); ++i) {
for (int i = 0; i < candidateArray.length(); ++i) {
candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
}
events.onRemoteIceCandidatesRemoved(candidates);
} else if (type.equals("answer")) {
if (initiator) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type),
json.getString("sdp"));
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
events.onRemoteDescription(sdp);
} else {
reportError("Received answer for call initiator: " + msg);
@ -324,8 +313,7 @@ public class WebSocketRTCClient implements AppRTCClient,
} else if (type.equals("offer")) {
if (!initiator) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type),
json.getString("sdp"));
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
events.onRemoteDescription(sdp);
} else {
reportError("Received offer for call receiver: " + msg);
@ -389,8 +377,8 @@ public class WebSocketRTCClient implements AppRTCClient,
logInfo += ". Message: " + message;
}
Log.d(TAG, "C->GAE: " + logInfo);
AsyncHttpURLConnection httpConnection = new AsyncHttpURLConnection(
"POST", url, message, new AsyncHttpEvents() {
AsyncHttpURLConnection httpConnection =
new AsyncHttpURLConnection("POST", url, message, new AsyncHttpEvents() {
@Override
public void onHttpError(String errorMessage) {
reportError("GAE POST error: " + errorMessage);
@ -425,8 +413,7 @@ public class WebSocketRTCClient implements AppRTCClient,

// Converts a JSON candidate to a Java object.
IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
return new IceCandidate(json.getString("id"),
json.getInt("label"),
json.getString("candidate"));
return new IceCandidate(
json.getString("id"), json.getInt("label"), json.getString("candidate"));
}
}

@ -17,9 +17,7 @@ import android.util.Log;
* AppRTCUtils provides helper functions for managing thread safety.
*/
public final class AppRTCUtils {

private AppRTCUtils() {
}
private AppRTCUtils() {}

/** Helper method which throws an exception when an assertion has failed. */
public static void assertIsTrue(boolean condition) {
@ -30,8 +28,8 @@ public final class AppRTCUtils {

/** Helper method for building a string of thread information.*/
public static String getThreadInfo() {
return "@[name=" + Thread.currentThread().getName()
+ ", id=" + Thread.currentThread().getId() + "]";
return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+ "]";
}

/** Information about the current build, taken from system properties. */

@ -38,8 +38,7 @@ public class AsyncHttpURLConnection {
void onHttpComplete(String response);
}

public AsyncHttpURLConnection(String method, String url, String message,
AsyncHttpEvents events) {
public AsyncHttpURLConnection(String method, String url, String message, AsyncHttpEvents events) {
this.method = method;
this.url = url;
this.message = message;
@ -61,8 +60,7 @@ public class AsyncHttpURLConnection {

private void sendHttpMessage() {
try {
HttpURLConnection connection =
(HttpURLConnection) new URL(url).openConnection();
HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
byte[] postData = new byte[0];
if (message != null) {
postData = message.getBytes("UTF-8");
@ -96,8 +94,8 @@ public class AsyncHttpURLConnection {
// Get response.
int responseCode = connection.getResponseCode();
if (responseCode != 200) {
events.onHttpError("Non-200 response to " + method + " to URL: "
+ url + " : " + connection.getHeaderField(null));
events.onHttpError("Non-200 response to " + method + " to URL: " + url + " : "
+ connection.getHeaderField(null));
connection.disconnect();
return;
}
@ -109,8 +107,7 @@ public class AsyncHttpURLConnection {
} catch (SocketTimeoutException e) {
events.onHttpError("HTTP " + method + " to " + url + " timeout");
} catch (IOException e) {
events.onHttpError("HTTP " + method + " to " + url + " error: "
+ e.getMessage());
events.onHttpError("HTTP " + method + " to " + url + " error: " + e.getMessage());
}
}


@ -62,6 +62,7 @@ public class DirectRTCClientTest {
@Test
public void testValidIpPattern() {
// Strings that should match the pattern.
// clang-format off
final String[] ipAddresses = new String[] {
"0.0.0.0",
"127.0.0.1",
@ -79,6 +80,7 @@ public class DirectRTCClientTest {
"[::1]:8888",
"[2001:0db8:85a3:0000:0000:8a2e:0370:7946]:8888"
};
// clang-format on

for (String ip : ipAddresses) {
assertTrue(ip + " didn't match IP_PATTERN even though it should.",
@ -89,6 +91,7 @@ public class DirectRTCClientTest {
@Test
public void testInvalidIpPattern() {
// Strings that shouldn't match the pattern.
// clang-format off
final String[] invalidIpAddresses = new String[] {
"Hello, World!",
"aaaa",
@ -96,6 +99,7 @@ public class DirectRTCClientTest {
"[hello world]",
"hello:world"
};
// clang-format on

for (String invalidIp : invalidIpAddresses) {
assertFalse(invalidIp + " matched IP_PATTERN even though it shouldn't.",
@ -121,8 +125,8 @@ public class DirectRTCClientTest {
verify(clientEvents, timeout(NETWORK_TIMEOUT))
.onConnectedToRoom(any(AppRTCClient.SignalingParameters.class));

SessionDescription answerSdp
= new SessionDescription(SessionDescription.Type.ANSWER, DUMMY_SDP);
SessionDescription answerSdp =
new SessionDescription(SessionDescription.Type.ANSWER, DUMMY_SDP);
client.sendAnswerSdp(answerSdp);
verify(serverEvents, timeout(NETWORK_TIMEOUT))
.onRemoteDescription(isNotNull(SessionDescription.class));

@ -52,7 +52,6 @@ public class TCPChannelClientTest {
private TCPChannelClient server;
private TCPChannelClient client;


@Before
public void setUp() {
ShadowLog.stream = System.out;

@ -37,8 +37,8 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class PeerConnectionClientTest extends InstrumentationTestCase
implements PeerConnectionEvents {
public class PeerConnectionClientTest
extends InstrumentationTestCase implements PeerConnectionEvents {
private static final String TAG = "RTCClientTest";
private static final int ICE_CONNECTION_WAIT_TIMEOUT = 10000;
private static final int WAIT_TIMEOUT = 7000;
@ -103,8 +103,8 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
if (!renderFrameCalled) {
if (rendererName != null) {
Log.d(TAG, rendererName + " render frame: "
+ frame.rotatedWidth() + " x " + frame.rotatedHeight());
Log.d(TAG, rendererName + " render frame: " + frame.rotatedWidth() + " x "
+ frame.rotatedHeight());
} else {
Log.d(TAG, "Render frame: " + frame.rotatedWidth() + " x " + frame.rotatedHeight());
}
@ -114,11 +114,9 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
doneRendering.countDown();
}


// This method shouldn't hold any locks or touch member variables since it
// blocks.
public boolean waitForFramesRendered(int timeoutMs)
throws InterruptedException {
public boolean waitForFramesRendered(int timeoutMs) throws InterruptedException {
doneRendering.await(timeoutMs, TimeUnit.MILLISECONDS);
return (doneRendering.getCount() <= 0);
}
@ -136,7 +134,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase

@Override
public void onIceCandidate(final IceCandidate candidate) {
synchronized(iceCandidateEvent) {
synchronized (iceCandidateEvent) {
Log.d(TAG, "IceCandidate #" + iceCandidates.size() + " : " + candidate.toString());
if (loopback) {
// Loopback local ICE candidate in a separate thread to avoid adding
@ -161,7 +159,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
@Override
public void onIceConnected() {
Log.d(TAG, "ICE Connected");
synchronized(iceConnectedEvent) {
synchronized (iceConnectedEvent) {
isIceConnected = true;
iceConnectedEvent.notifyAll();
}
@ -170,7 +168,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
@Override
public void onIceDisconnected() {
Log.d(TAG, "ICE Disconnected");
synchronized(iceConnectedEvent) {
synchronized (iceConnectedEvent) {
isIceConnected = false;
iceConnectedEvent.notifyAll();
}
@ -179,7 +177,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
@Override
public void onPeerConnectionClosed() {
Log.d(TAG, "PeerConnection closed");
synchronized(closeEvent) {
synchronized (closeEvent) {
isClosed = true;
closeEvent.notifyAll();
}
@ -191,13 +189,11 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
}

@Override
public void onPeerConnectionStatsReady(StatsReport[] reports) {
}
public void onPeerConnectionStatsReady(StatsReport[] reports) {}

// Helper wait functions.
private boolean waitForLocalSDP(int timeoutMs)
throws InterruptedException {
synchronized(localSdpEvent) {
private boolean waitForLocalSDP(int timeoutMs) throws InterruptedException {
synchronized (localSdpEvent) {
if (localSdp == null) {
localSdpEvent.wait(timeoutMs);
}
@ -205,9 +201,8 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
}
}

private boolean waitForIceCandidates(int timeoutMs)
throws InterruptedException {
synchronized(iceCandidateEvent) {
private boolean waitForIceCandidates(int timeoutMs) throws InterruptedException {
synchronized (iceCandidateEvent) {
if (iceCandidates.size() == 0) {
iceCandidateEvent.wait(timeoutMs);
}
@ -215,9 +210,8 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
}
}

private boolean waitForIceConnected(int timeoutMs)
throws InterruptedException {
synchronized(iceConnectedEvent) {
private boolean waitForIceConnected(int timeoutMs) throws InterruptedException {
synchronized (iceConnectedEvent) {
if (!isIceConnected) {
iceConnectedEvent.wait(timeoutMs);
}
@ -229,9 +223,8 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
}
}

private boolean waitForPeerConnectionClosed(int timeoutMs)
throws InterruptedException {
synchronized(closeEvent) {
private boolean waitForPeerConnectionClosed(int timeoutMs) throws InterruptedException {
synchronized (closeEvent) {
if (!isClosed) {
closeEvent.wait(timeoutMs);
}
@ -239,13 +232,12 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
}
}

PeerConnectionClient createPeerConnectionClient(
MockRenderer localRenderer, MockRenderer remoteRenderer,
PeerConnectionParameters peerConnectionParameters, EglBase.Context eglContext) {
List<PeerConnection.IceServer> iceServers =
new LinkedList<PeerConnection.IceServer>();
SignalingParameters signalingParameters = new SignalingParameters(
iceServers, true, // iceServers, initiator.
PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer,
MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
EglBase.Context eglContext) {
List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
SignalingParameters signalingParameters =
new SignalingParameters(iceServers, true, // iceServers, initiator.
null, null, null, // clientId, wssUrl, wssPostUrl.
null, null); // offerSdp, iceCandidates.

@ -263,8 +255,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase

private PeerConnectionParameters createParametersForAudioCall() {
PeerConnectionParameters peerConnectionParameters =
new PeerConnectionParameters(
false, /* videoCallEnabled */
new PeerConnectionParameters(false, /* videoCallEnabled */
true, /* loopback */
false, /* tracing */
// Video codec parameters.
@ -281,22 +272,18 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
"OPUS", /* audioCodec */
false, /* noAudioProcessing */
false, /* aecDump */
false /* useOpenSLES */,
false /* disableBuiltInAEC */,
false /* disableBuiltInAGC */,
false /* disableBuiltInNS */,
false /* enableLevelControl */);
false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
false /* disableBuiltInNS */, false /* enableLevelControl */);
return peerConnectionParameters;
}

private PeerConnectionParameters createParametersForVideoCall(
String videoCodec, boolean captureToTexture) {
final boolean useCamera2 = captureToTexture
&& Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());
final boolean useCamera2 =
captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());

PeerConnectionParameters peerConnectionParameters =
new PeerConnectionParameters(
true, /* videoCallEnabled */
new PeerConnectionParameters(true, /* videoCallEnabled */
true, /* loopback */
false, /* tracing */
// Video codec parameters.
@ -313,11 +300,8 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
"OPUS", /* audioCodec */
false, /* noAudioProcessing */
false, /* aecDump */
false /* useOpenSLES */,
false /* disableBuiltInAEC */,
false /* disableBuiltInAGC */,
false /* disableBuiltInNS */,
false /* enableLevelControl */);
false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
false /* disableBuiltInNS */, false /* enableLevelControl */);
return peerConnectionParameters;
}

@ -338,26 +322,23 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
}

@SmallTest
public void testSetLocalOfferMakesVideoFlowLocally()
throws InterruptedException {
public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException {
Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
pcClient = createPeerConnectionClient(
localRenderer, new MockRenderer(0, null),
pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null),
createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);

// Wait for local SDP and ice candidates set events.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
assertTrue("ICE candidates were not generated.",
waitForIceCandidates(WAIT_TIMEOUT));
assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT));

// Check that local video frames were rendered.
assertTrue("Local video frames were not rendered.",
localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
assertTrue(
"Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT));

pcClient.close();
assertTrue("PeerConnection close event was not received.",
waitForPeerConnectionClosed(WAIT_TIMEOUT));
assertTrue(
"PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT));
Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done.");
}

@ -379,8 +360,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
SessionDescription remoteSdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm("answer"),
localSdp.description);
SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
pcClient.setRemoteDescription(remoteSdp);

// Wait for ICE connection.
@ -492,15 +472,14 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
eglBase = null;

SessionDescription remoteSdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm("answer"),
localSdp.description);
SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
pcClient.setRemoteDescription(remoteSdp);

// Wait for ICE connection.
assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));
// Check that local and remote video frames were rendered.
assertTrue("Local video frames were not rendered.",
localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
assertTrue(
"Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
assertTrue("Remote video frames were not rendered.",
remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));

@ -524,7 +503,6 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true);
}


// Checks if default front camera can be switched to back camera and then
// again to front camera.
@SmallTest
@ -541,8 +519,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
SessionDescription remoteSdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm("answer"),
localSdp.description);
SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
pcClient.setRemoteDescription(remoteSdp);

// Wait for ICE connection.
@ -588,8 +565,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
SessionDescription remoteSdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm("answer"),
localSdp.description);
SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
pcClient.setRemoteDescription(remoteSdp);

// Wait for ICE connection.
@ -636,8 +612,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
SessionDescription remoteSdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm("answer"),
localSdp.description);
SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
pcClient.setRemoteDescription(remoteSdp);

// Wait for ICE connection.
@ -671,5 +646,4 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
Log.d(TAG, "testCaptureFormatChange done.");
}

}

@ -67,8 +67,7 @@ class WebRtcAudioEffects {
// Note: we're using isAcousticEchoCancelerEffectAvailable() instead of
// AcousticEchoCanceler.isAvailable() to avoid the expensive getEffects()
// OS API call.
return WebRtcAudioUtils.runningOnJellyBeanOrHigher()
&& isAcousticEchoCancelerEffectAvailable();
return WebRtcAudioUtils.runningOnJellyBeanOrHigher() && isAcousticEchoCancelerEffectAvailable();
}

// Checks if the device implements Automatic Gain Control (AGC).
@ -77,8 +76,7 @@ class WebRtcAudioEffects {
// Note: we're using isAutomaticGainControlEffectAvailable() instead of
// AutomaticGainControl.isAvailable() to avoid the expensive getEffects()
// OS API call.
return WebRtcAudioUtils.runningOnJellyBeanOrHigher()
&& isAutomaticGainControlEffectAvailable();
return WebRtcAudioUtils.runningOnJellyBeanOrHigher() && isAutomaticGainControlEffectAvailable();
}

// Checks if the device implements Noise Suppression (NS).
@ -87,14 +85,12 @@ class WebRtcAudioEffects {
// Note: we're using isNoiseSuppressorEffectAvailable() instead of
// NoiseSuppressor.isAvailable() to avoid the expensive getEffects()
// OS API call.
return WebRtcAudioUtils.runningOnJellyBeanOrHigher()
&& isNoiseSuppressorEffectAvailable();
return WebRtcAudioUtils.runningOnJellyBeanOrHigher() && isNoiseSuppressorEffectAvailable();
}

// Returns true if the device is blacklisted for HW AEC usage.
public static boolean isAcousticEchoCancelerBlacklisted() {
List<String> blackListedModels =
WebRtcAudioUtils.getBlackListedModelsForAecUsage();
List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForAecUsage();
boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
if (isBlacklisted) {
Logging.w(TAG, Build.MODEL + " is blacklisted for HW AEC usage!");
@ -104,8 +100,7 @@ class WebRtcAudioEffects {

// Returns true if the device is blacklisted for HW AGC usage.
public static boolean isAutomaticGainControlBlacklisted() {
List<String> blackListedModels =
WebRtcAudioUtils.getBlackListedModelsForAgcUsage();
List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForAgcUsage();
boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
if (isBlacklisted) {
Logging.w(TAG, Build.MODEL + " is blacklisted for HW AGC usage!");
@ -115,8 +110,7 @@ class WebRtcAudioEffects {

// Returns true if the device is blacklisted for HW NS usage.
public static boolean isNoiseSuppressorBlacklisted() {
List<String> blackListedModels =
WebRtcAudioUtils.getBlackListedModelsForNsUsage();
List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForNsUsage();
boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
if (isBlacklisted) {
Logging.w(TAG, Build.MODEL + " is blacklisted for HW NS usage!");
@ -129,8 +123,8 @@ class WebRtcAudioEffects {
@TargetApi(18)
private static boolean isAcousticEchoCancelerExcludedByUUID() {
for (Descriptor d : getAvailableEffects()) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC) &&
d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC)
&& d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
return true;
}
}
@ -142,8 +136,8 @@ class WebRtcAudioEffects {
@TargetApi(18)
private static boolean isAutomaticGainControlExcludedByUUID() {
for (Descriptor d : getAvailableEffects()) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_AGC) &&
d.uuid.equals(AOSP_AUTOMATIC_GAIN_CONTROL)) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_AGC)
&& d.uuid.equals(AOSP_AUTOMATIC_GAIN_CONTROL)) {
return true;
}
}
@ -155,8 +149,7 @@ class WebRtcAudioEffects {
@TargetApi(18)
private static boolean isNoiseSuppressorExcludedByUUID() {
for (Descriptor d : getAvailableEffects()) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) &&
d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
return true;
}
}
@ -184,36 +177,28 @@ class WebRtcAudioEffects {
// Returns true if all conditions for supporting the HW AEC are fulfilled.
// It will not be possible to enable the HW AEC if this method returns false.
public static boolean canUseAcousticEchoCanceler() {
boolean canUseAcousticEchoCanceler =
isAcousticEchoCancelerSupported()
boolean canUseAcousticEchoCanceler = isAcousticEchoCancelerSupported()
&& !WebRtcAudioUtils.useWebRtcBasedAcousticEchoCanceler()
&& !isAcousticEchoCancelerBlacklisted()
&& !isAcousticEchoCancelerExcludedByUUID();
Logging.d(TAG, "canUseAcousticEchoCanceler: "
+ canUseAcousticEchoCanceler);
&& !isAcousticEchoCancelerBlacklisted() && !isAcousticEchoCancelerExcludedByUUID();
Logging.d(TAG, "canUseAcousticEchoCanceler: " + canUseAcousticEchoCanceler);
return canUseAcousticEchoCanceler;
}

// Returns true if all conditions for supporting the HW AGC are fulfilled.
// It will not be possible to enable the HW AGC if this method returns false.
public static boolean canUseAutomaticGainControl() {
boolean canUseAutomaticGainControl =
isAutomaticGainControlSupported()
boolean canUseAutomaticGainControl = isAutomaticGainControlSupported()
&& !WebRtcAudioUtils.useWebRtcBasedAutomaticGainControl()
&& !isAutomaticGainControlBlacklisted()
&& !isAutomaticGainControlExcludedByUUID();
Logging.d(TAG, "canUseAutomaticGainControl: "
+ canUseAutomaticGainControl);
&& !isAutomaticGainControlBlacklisted() && !isAutomaticGainControlExcludedByUUID();
Logging.d(TAG, "canUseAutomaticGainControl: " + canUseAutomaticGainControl);
return canUseAutomaticGainControl;
}

// Returns true if all conditions for supporting the HW NS are fulfilled.
// It will not be possible to enable the HW NS if this method returns false.
public static boolean canUseNoiseSuppressor() {
boolean canUseNoiseSuppressor =
isNoiseSuppressorSupported()
&& !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor()
&& !isNoiseSuppressorBlacklisted()
boolean canUseNoiseSuppressor = isNoiseSuppressorSupported()
&& !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor() && !isNoiseSuppressorBlacklisted()
&& !isNoiseSuppressorExcludedByUUID();
Logging.d(TAG, "canUseNoiseSuppressor: " + canUseNoiseSuppressor);
return canUseNoiseSuppressor;
@ -316,8 +301,7 @@ class WebRtcAudioEffects {
if (aec.setEnabled(enable) != AudioEffect.SUCCESS) {
Logging.e(TAG, "Failed to set the AcousticEchoCanceler state");
}
Logging.d(TAG, "AcousticEchoCanceler: was "
+ (enabled ? "enabled" : "disabled")
Logging.d(TAG, "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled")
+ ", enable: " + enable + ", is now: "
+ (aec.getEnabled() ? "enabled" : "disabled"));
} else {
@ -335,8 +319,7 @@ class WebRtcAudioEffects {
if (agc.setEnabled(enable) != AudioEffect.SUCCESS) {
Logging.e(TAG, "Failed to set the AutomaticGainControl state");
}
Logging.d(TAG, "AutomaticGainControl: was "
+ (enabled ? "enabled" : "disabled")
Logging.d(TAG, "AutomaticGainControl: was " + (enabled ? "enabled" : "disabled")
+ ", enable: " + enable + ", is now: "
+ (agc.getEnabled() ? "enabled" : "disabled"));
} else {
@ -354,10 +337,8 @@ class WebRtcAudioEffects {
if (ns.setEnabled(enable) != AudioEffect.SUCCESS) {
Logging.e(TAG, "Failed to set the NoiseSuppressor state");
}
Logging.d(TAG, "NoiseSuppressor: was "
+ (enabled ? "enabled" : "disabled")
+ ", enable: " + enable + ", is now: "
+ (ns.getEnabled() ? "enabled" : "disabled"));
Logging.d(TAG, "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: "
+ enable + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled"));
} else {
Logging.e(TAG, "Failed to create the NoiseSuppressor instance");
}
@ -395,12 +376,9 @@ class WebRtcAudioEffects {
if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
return false;

return (AudioEffect.EFFECT_TYPE_AEC.equals(type)
&& isAcousticEchoCancelerSupported())
|| (AudioEffect.EFFECT_TYPE_AGC.equals(type)
&& isAutomaticGainControlSupported())
|| (AudioEffect.EFFECT_TYPE_NS.equals(type)
&& isNoiseSuppressorSupported());
return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
|| (AudioEffect.EFFECT_TYPE_AGC.equals(type) && isAutomaticGainControlSupported())
|| (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
}

// Helper method which throws an exception when an assertion has failed.

@ -45,8 +45,7 @@ public class WebRtcAudioManager {
// specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS.
// Allows an app to take control over which devices to exlude from using
// the OpenSL ES audio output path
public static synchronized void setBlacklistDeviceForOpenSLESUsage(
boolean enable) {
public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) {
blacklistDeviceForOpenSLESUsageIsOverridden = true;
blacklistDeviceForOpenSLESUsage = enable;
}
@ -62,10 +61,7 @@ public class WebRtcAudioManager {

// List of possible audio modes.
private static final String[] AUDIO_MODES = new String[] {
"MODE_NORMAL",
"MODE_RINGTONE",
"MODE_IN_CALL",
"MODE_IN_COMMUNICATION",
"MODE_NORMAL", "MODE_RINGTONE", "MODE_IN_CALL", "MODE_IN_COMMUNICATION",
};

// Private utility class that periodically checks and logs the volume level
@ -85,8 +81,7 @@ public class WebRtcAudioManager {

public void start() {
timer = new Timer(THREAD_NAME);
timer.schedule(new LogVolumeTask(
audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
0, TIMER_PERIOD_IN_SECONDS * 1000);
}
@ -104,12 +99,12 @@ public class WebRtcAudioManager {
final int mode = audioManager.getMode();
if (mode == AudioManager.MODE_RINGTONE) {
Logging.d(TAG, "STREAM_RING stream volume: "
+ audioManager.getStreamVolume(AudioManager.STREAM_RING)
+ " (max=" + maxRingVolume + ")");
+ audioManager.getStreamVolume(AudioManager.STREAM_RING) + " (max="
+ maxRingVolume + ")");
} else if (mode == AudioManager.MODE_IN_COMMUNICATION) {
Logging.d(TAG, "VOICE_CALL stream volume: "
+ audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL)
+ " (max=" + maxVoiceCallVolume + ")");
+ audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL) + " (max="
+ maxVoiceCallVolume + ")");
}
}
}
@ -147,8 +142,7 @@ public class WebRtcAudioManager {
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
this.context = context;
this.nativeAudioManager = nativeAudioManager;
audioManager = (AudioManager) context.getSystemService(
Context.AUDIO_SERVICE);
audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
if (DEBUG) {
WebRtcAudioUtils.logDeviceInfo(TAG);
}
@ -183,9 +177,9 @@ public class WebRtcAudioManager {
}

private boolean isDeviceBlacklistedForOpenSLESUsage() {
boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden ?
blacklistDeviceForOpenSLESUsage :
WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
? blacklistDeviceForOpenSLESUsage
: WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
if (blacklisted) {
Logging.e(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!");
}
@ -203,24 +197,21 @@ public class WebRtcAudioManager {
lowLatencyOutput = isLowLatencyOutputSupported();
lowLatencyInput = isLowLatencyInputSupported();
proAudio = isProAudioSupported();
outputBufferSize = lowLatencyOutput ?
getLowLatencyOutputFramesPerBuffer() :
getMinOutputFrameSize(sampleRate, channels);
outputBufferSize = lowLatencyOutput ? getLowLatencyOutputFramesPerBuffer()
: getMinOutputFrameSize(sampleRate, channels);
inputBufferSize = lowLatencyInput ? getLowLatencyInputFramesPerBuffer()
: getMinInputFrameSize(sampleRate, channels);
}

// Gets the current earpiece state.
private boolean hasEarpiece() {
return context.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_TELEPHONY);
return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
}

// Returns true if low-latency audio output is supported.
private boolean isLowLatencyOutputSupported() {
return isOpenSLESSupported() &&
context.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_AUDIO_LOW_LATENCY);
return isOpenSLESSupported()
&& context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
}

// Returns true if low-latency audio input is supported.
@ -231,16 +222,14 @@ public class WebRtcAudioManager {
// as well. The NDK doc states that: "As of API level 21, lower latency
// audio input is supported on select devices. To take advantage of this
// feature, first confirm that lower latency output is available".
return WebRtcAudioUtils.runningOnLollipopOrHigher() &&
isLowLatencyOutputSupported();
return WebRtcAudioUtils.runningOnLollipopOrHigher() && isLowLatencyOutputSupported();
}

// Returns true if the device has professional audio level of functionality
// and therefore supports the lowest possible round-trip latency.
private boolean isProAudioSupported() {
return WebRtcAudioUtils.runningOnMarshmallowOrHigher()
&& context.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_AUDIO_PRO);
&& context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_PRO);
}

// Returns the native output sample rate for this device's output stream.
@ -254,8 +243,8 @@ public class WebRtcAudioManager {
// Default can be overriden by WebRtcAudioUtils.setDefaultSampleRateHz().
// If so, use that value and return here.
if (WebRtcAudioUtils.isDefaultSampleRateOverridden()) {
Logging.d(TAG, "Default sample rate is overriden to " +
WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
Logging.d(TAG, "Default sample rate is overriden to "
+ WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
return WebRtcAudioUtils.getDefaultSampleRateHz();
}
// No overrides available. Deliver best possible estimate based on default
@ -272,10 +261,8 @@ public class WebRtcAudioManager {

@TargetApi(17)
private int getSampleRateOnJellyBeanMR10OrHigher() {
String sampleRateString = audioManager.getProperty(
AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
return (sampleRateString == null)
? WebRtcAudioUtils.getDefaultSampleRateHz()
String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz()
: Integer.parseInt(sampleRateString);
}

@ -286,10 +273,9 @@ public class WebRtcAudioManager {
if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
return DEFAULT_FRAME_PER_BUFFER;
}
String framesPerBuffer = audioManager.getProperty(
AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
return framesPerBuffer == null ?
DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
String framesPerBuffer =
audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
}

// Returns true if the device supports an audio effect (AEC, AGC or NS).
@ -322,8 +308,8 @@ public class WebRtcAudioManager {
return -1;
}
return AudioTrack.getMinBufferSize(
sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT) /
bytesPerFrame;
sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
/ bytesPerFrame;
}

// Returns the native input buffer size for input streams.
@ -338,9 +324,9 @@ public class WebRtcAudioManager {
private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
assertTrue(numChannels == CHANNELS);
return AudioRecord.getMinBufferSize(sampleRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) /
bytesPerFrame;
return AudioRecord.getMinBufferSize(
sampleRateInHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)
/ bytesPerFrame;
}

// Returns true if OpenSL ES audio is supported.
@ -357,7 +343,6 @@ public class WebRtcAudioManager {
}

private native void nativeCacheAudioParameters(int sampleRate, int channels, boolean hardwareAEC,
boolean hardwareAGC, boolean hardwareNS, boolean lowLatencyOutput,
boolean lowLatencyInput, boolean proAudio, int outputBufferSize, int inputBufferSize,
long nativeAudioManager);
boolean hardwareAGC, boolean hardwareNS, boolean lowLatencyOutput, boolean lowLatencyInput,
boolean proAudio, int outputBufferSize, int inputBufferSize, long nativeAudioManager);
}

@ -77,8 +77,7 @@ public class WebRtcAudioRecord {
public void run() {
Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo());
assertTrue(audioRecord.getRecordingState()
== AudioRecord.RECORDSTATE_RECORDING);
assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);

long lastTime = System.nanoTime();
while (keepAlive) {
@ -90,15 +89,14 @@ public class WebRtcAudioRecord {
}
nativeDataIsRecorded(bytesRead, nativeAudioRecord);
} else {
Logging.e(TAG,"AudioRecord.read failed: " + bytesRead);
Logging.e(TAG, "AudioRecord.read failed: " + bytesRead);
if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
keepAlive = false;
}
}
if (DEBUG) {
long nowTime = System.nanoTime();
long durationInMs =
TimeUnit.NANOSECONDS.toMillis((nowTime - lastTime));
long durationInMs = TimeUnit.NANOSECONDS.toMillis((nowTime - lastTime));
lastTime = nowTime;
Logging.d(TAG, "bytesRead[" + durationInMs + "] " + bytesRead);
}
@ -159,10 +157,8 @@ public class WebRtcAudioRecord {
}

private int initRecording(int sampleRate, int channels) {
Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" +
channels + ")");
if (!WebRtcAudioUtils.hasPermission(
context, android.Manifest.permission.RECORD_AUDIO)) {
Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
if (!WebRtcAudioUtils.hasPermission(context, android.Manifest.permission.RECORD_AUDIO)) {
Logging.e(TAG, "RECORD_AUDIO permission is missing");
return -1;
}
@ -184,11 +180,8 @@ public class WebRtcAudioRecord {
// an AudioRecord object, in byte units.
// Note that this size doesn't guarantee a smooth recording under load.
int minBufferSize = AudioRecord.getMinBufferSize(
sampleRate,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
if (minBufferSize == AudioRecord.ERROR
|| minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
Logging.e(TAG, "AudioRecord.getMinBufferSize failed: " + minBufferSize);
return -1;
}
@ -197,21 +190,16 @@ public class WebRtcAudioRecord {
// Use a larger buffer size than the minimum required when creating the
// AudioRecord instance to ensure smooth recording under load. It has been
// verified that it does not increase the actual recording latency.
int bufferSizeInBytes =
Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes);
try {
audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION,
sampleRate,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSizeInBytes);
audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION, sampleRate,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
} catch (IllegalArgumentException e) {
Logging.e(TAG,e.getMessage());
Logging.e(TAG, e.getMessage());
return -1;
}
if (audioRecord == null ||
audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
Logging.e(TAG, "Failed to create a new AudioRecord instance");
return -1;
}
@ -261,8 +249,7 @@ public class WebRtcAudioRecord {
Logging.d(TAG, "stopRecording");
assertTrue(audioThread != null);
audioThread.stopThread();
if (!ThreadUtils.joinUninterruptibly(
audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
}
audioThread = null;
@ -281,8 +268,7 @@ public class WebRtcAudioRecord {
}
}

private native void nativeCacheDirectBufferAddress(
ByteBuffer byteBuffer, long nativeAudioRecord);
private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);

private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord);


@ -155,19 +155,16 @@ public class WebRtcAudioTrack {
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
this.context = context;
this.nativeAudioTrack = nativeAudioTrack;
audioManager = (AudioManager) context.getSystemService(
Context.AUDIO_SERVICE);
audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
if (DEBUG) {
WebRtcAudioUtils.logDeviceInfo(TAG);
}
}

private boolean initPlayout(int sampleRate, int channels) {
|
||||
Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels="
|
||||
+ channels + ")");
|
||||
Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
|
||||
final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
|
||||
byteBuffer = byteBuffer.allocateDirect(
|
||||
bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
|
||||
byteBuffer = byteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
|
||||
Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
|
||||
emptyBytes = new byte[byteBuffer.capacity()];
|
||||
// Rather than passing the ByteBuffer with every callback (requiring
|
||||
@ -180,9 +177,7 @@ public class WebRtcAudioTrack {
|
||||
// Note that this size doesn't guarantee a smooth playback under load.
|
||||
// TODO(henrika): should we extend the buffer size to avoid glitches?
|
||||
final int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
|
||||
sampleRate,
|
||||
AudioFormat.CHANNEL_OUT_MONO,
|
||||
AudioFormat.ENCODING_PCM_16BIT);
|
||||
sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
|
||||
Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
|
||||
// For the streaming mode, data must be written to the audio sink in
|
||||
// chunks of size (given by byteBuffer.capacity()) less than or equal
|
||||
@ -204,12 +199,9 @@ public class WebRtcAudioTrack {
      // Create an AudioTrack object and initialize its associated audio buffer.
      // The size of this buffer determines how long an AudioTrack can play
      // before running out of data.
      audioTrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL,
          sampleRate,
          AudioFormat.CHANNEL_OUT_MONO,
          AudioFormat.ENCODING_PCM_16BIT,
          minBufferSizeInBytes,
          AudioTrack.MODE_STREAM);
      audioTrack =
          new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
              AudioFormat.ENCODING_PCM_16BIT, minBufferSizeInBytes, AudioTrack.MODE_STREAM);
    } catch (IllegalArgumentException e) {
      Logging.d(TAG, e.getMessage());
      return false;
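As the comments in this hunk note, a MODE_STREAM AudioTrack is fed in chunks no larger than its buffer. A minimal stand-alone sketch of that pattern follows; the helper name, the blocking write loop, and the 10 ms chunk size are illustrative assumptions rather than code from this CL.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

final class AudioTrackStreamSketch {
  // Plays 16-bit mono PCM in streaming mode; sampleRate is an assumed example value.
  static void playBlocking(short[] pcm, int sampleRate) {
    final int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
        sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack track = new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSizeInBytes,
        AudioTrack.MODE_STREAM);
    if (track.getState() != AudioTrack.STATE_INITIALIZED) {
      track.release();
      return;
    }
    track.play();
    int written = 0;
    while (written < pcm.length) {
      // write() blocks until space is available in the track's buffer.
      int n = track.write(pcm, written, Math.min(pcm.length - written, sampleRate / 100));
      if (n < 0) {
        break; // Negative values are error codes from AudioTrack.write().
      }
      written += n;
    }
    track.stop();
    track.release();
  }
}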
@ -290,8 +282,7 @@ public class WebRtcAudioTrack {
    }
  }

  private native void nativeCacheDirectBufferAddress(
      ByteBuffer byteBuffer, long nativeAudioRecord);
  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);

  private native void nativeGetPlayoutData(int bytes, long nativeAudioRecord);

@ -40,12 +40,10 @@ public final class WebRtcAudioUtils {
      "MotoG3", // Moto G (3rd Generation)
  };
  private static final String[] BLACKLISTED_AGC_MODELS = new String[] {
      "Nexus 10",
      "Nexus 9",
      "Nexus 10", "Nexus 9",
  };
  private static final String[] BLACKLISTED_NS_MODELS = new String[] {
      "Nexus 10",
      "Nexus 9",
      "Nexus 10", "Nexus 9",
      "ONE A2005", // OnePlus 2
  };

@ -63,16 +61,13 @@ public final class WebRtcAudioUtils {

  // Call these methods if any hardware based effect shall be replaced by a
  // software based version provided by the WebRTC stack instead.
  public static synchronized void setWebRtcBasedAcousticEchoCanceler(
      boolean enable) {
  public static synchronized void setWebRtcBasedAcousticEchoCanceler(boolean enable) {
    useWebRtcBasedAcousticEchoCanceler = enable;
  }
  public static synchronized void setWebRtcBasedAutomaticGainControl(
      boolean enable) {
  public static synchronized void setWebRtcBasedAutomaticGainControl(boolean enable) {
    useWebRtcBasedAutomaticGainControl = enable;
  }
  public static synchronized void setWebRtcBasedNoiseSuppressor(
      boolean enable) {
  public static synchronized void setWebRtcBasedNoiseSuppressor(boolean enable) {
    useWebRtcBasedNoiseSuppressor = enable;
  }

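The comment in this hunk describes when an application would flip these switches: before audio is set up, to replace a hardware effect with WebRTC's software implementation. A hedged usage sketch, assuming the three setters shown above and the usual org.webrtc.voiceengine package location for WebRtcAudioUtils:

import org.webrtc.voiceengine.WebRtcAudioUtils; // Assumed package; verify against your WebRTC build.

final class SoftwareAudioEffectsSketch {
  // Illustrative only: force the WebRTC software AEC/AGC/NS. Call before the
  // audio device module is created; the surrounding setup code is assumed.
  static void preferSoftwareEffects() {
    WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
    WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(true);
    WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true);
  }
}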
@ -171,20 +166,18 @@ public final class WebRtcAudioUtils {

  // Helper method for building a string of thread information.
  public static String getThreadInfo() {
    return "@[name=" + Thread.currentThread().getName()
        + ", id=" + Thread.currentThread().getId() + "]";
    return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
        + "]";
  }

  // Returns true if we're running on emulator.
  public static boolean runningOnEmulator() {
    return Build.HARDWARE.equals("goldfish") &&
        Build.BRAND.startsWith("generic_");
    return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
  }

  // Returns true if the device is blacklisted for OpenSL ES usage.
  public static boolean deviceIsBlacklistedForOpenSLESUsage() {
    List<String> blackListedModels =
        Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
    List<String> blackListedModels = Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
    return blackListedModels.contains(Build.MODEL);
  }

@ -203,9 +196,7 @@ public final class WebRtcAudioUtils {

  // Checks if the process has as specified permission or not.
  public static boolean hasPermission(Context context, String permission) {
    return context.checkPermission(
        permission,
        Process.myPid(),
        Process.myUid()) == PackageManager.PERMISSION_GRANTED;
    return context.checkPermission(permission, Process.myPid(), Process.myUid())
        == PackageManager.PERMISSION_GRANTED;
  }
}

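For reference, the helper above is the process-level variant of Android's permission check. A caller would typically use it along these lines; the RECORD_AUDIO permission here is only an illustrative choice, not something this CL adds.

import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Process;

final class PermissionCheckSketch {
  // Returns true if the calling process holds RECORD_AUDIO; mirrors hasPermission() above.
  static boolean canRecordAudio(Context context) {
    return context.checkPermission(
               Manifest.permission.RECORD_AUDIO, Process.myPid(), Process.myUid())
        == PackageManager.PERMISSION_GRANTED;
  }
}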