Format all Java in WebRTC.

BUG=webrtc:6419
TBR=henrika@webrtc.org

Review-Url: https://codereview.webrtc.org/2377003002
Cr-Commit-Position: refs/heads/master@{#14432}
Author: sakal
Date: 2016-09-29 04:12:44 -07:00
Committed by: Commit bot
Parent: a48ddb7636
Commit: b6760f9e44
88 changed files with 1555 additions and 2071 deletions

File: CameraEnumerationTest.java

@@ -31,24 +31,20 @@ public class CameraEnumerationTest {
   @Test
   public void testGetClosestSupportedFramerateRange() {
     assertEquals(new FramerateRange(10000, 30000),
-        getClosestSupportedFramerateRange(
-            Arrays.asList(new FramerateRange(10000, 30000),
+        getClosestSupportedFramerateRange(Arrays.asList(new FramerateRange(10000, 30000),
                 new FramerateRange(30000, 30000)),
             30 /* requestedFps */));
     assertEquals(new FramerateRange(10000, 20000),
         getClosestSupportedFramerateRange(
-            Arrays.asList(new FramerateRange(0, 30000),
-                new FramerateRange(10000, 20000),
-                new FramerateRange(14000, 16000),
-                new FramerateRange(15000, 15000)),
+            Arrays.asList(new FramerateRange(0, 30000), new FramerateRange(10000, 20000),
+                new FramerateRange(14000, 16000), new FramerateRange(15000, 15000)),
             15 /* requestedFps */));
     assertEquals(new FramerateRange(10000, 20000),
         getClosestSupportedFramerateRange(
             Arrays.asList(new FramerateRange(15000, 15000),
-                new FramerateRange(10000, 20000),
-                new FramerateRange(10000, 30000)),
+                new FramerateRange(10000, 20000), new FramerateRange(10000, 30000)),
             10 /* requestedFps */));
   }
 }

File: CallSessionFileRotatingLogSink.java

@@ -33,8 +33,7 @@ public class CallSessionFileRotatingLogSink {
     }
   }
 
-  private static native long nativeAddSink(
-      String dirPath, int maxFileSize, int severity);
+  private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
   private static native void nativeDeleteSink(long nativeSink);
   private static native byte[] nativeGetLogData(String dirPath);
 }

File: Camera1Capturer.java

@@ -19,20 +19,20 @@ import java.util.List;
 public class Camera1Capturer extends CameraCapturer {
   private final boolean captureToTexture;
 
-  public Camera1Capturer(String cameraName, CameraEventsHandler eventsHandler,
-      boolean captureToTexture) {
+  public Camera1Capturer(
+      String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
     super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
     this.captureToTexture = captureToTexture;
   }
 
   @Override
-  protected void createCameraSession(
-      CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
-      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
-      String cameraName, int width, int height, int framerate) {
-    Camera1Session.create(
-        createSessionCallback, events, captureToTexture, applicationContext, surfaceTextureHelper,
-        Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
+  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+      CameraSession.Events events, Context applicationContext,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+      int framerate) {
+    Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
+        surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
+        framerate);
   }
 }

File: Camera1Enumerator.java

@@ -70,8 +70,8 @@ public class Camera1Enumerator implements CameraEnumerator {
   }
 
   @Override
-  public CameraVideoCapturer createCapturer(String deviceName,
-      CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
     return new VideoCapturerAndroid(deviceName, eventsHandler, captureToTexture);
   }
@@ -181,7 +181,6 @@ public class Camera1Enumerator implements CameraEnumerator {
     String facing =
         (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
-    return "Camera " + index + ", Facing " + facing
-        + ", Orientation " + info.orientation;
+    return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
   }
 }

File: Camera1Session.java

@@ -34,7 +34,7 @@ public class Camera1Session implements CameraSession {
   private static final Histogram camera1StopTimeMsHistogram =
       Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
 
-  private static enum SessionState { RUNNING, STOPPED };
+  private static enum SessionState { RUNNING, STOPPED }
 
   private final Handler cameraThreadHandler;
   private final Events events;
@@ -54,11 +54,10 @@ public class Camera1Session implements CameraSession {
   private SessionState state;
   private boolean firstFrameReported = false;
 
-  public static void create(
-      final CreateSessionCallback callback, final Events events,
+  public static void create(final CreateSessionCallback callback, final Events events,
       final boolean captureToTexture, final Context applicationContext,
-      final SurfaceTextureHelper surfaceTextureHelper,
-      final int cameraId, final int width, final int height, final int framerate) {
+      final SurfaceTextureHelper surfaceTextureHelper, final int cameraId, final int width,
+      final int height, final int framerate) {
     final long constructionTimeNs = System.nanoTime();
     Logging.d(TAG, "Open camera " + cameraId);
     events.onCameraOpening();
@@ -83,8 +82,8 @@ public class Camera1Session implements CameraSession {
     android.hardware.Camera.getCameraInfo(cameraId, info);
     final android.hardware.Camera.Parameters parameters = camera.getParameters();
-    final CaptureFormat captureFormat = findClosestCaptureFormat(
-        parameters, width, height, framerate);
+    final CaptureFormat captureFormat =
+        findClosestCaptureFormat(parameters, width, height, framerate);
     final Size pictureSize = findClosestPictureSize(parameters, width, height);
 
     updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
@@ -101,10 +100,9 @@ public class Camera1Session implements CameraSession {
     // Calculate orientation manually and send it as CVO insted.
     camera.setDisplayOrientation(0 /* degrees */);
 
-    callback.onDone(new Camera1Session(
-        events, captureToTexture, applicationContext, surfaceTextureHelper,
-        cameraId, width, height, framerate,
-        camera, info, captureFormat, constructionTimeNs));
+    callback.onDone(
+        new Camera1Session(events, captureToTexture, applicationContext, surfaceTextureHelper,
+            cameraId, width, height, framerate, camera, info, captureFormat, constructionTimeNs));
   }
 
   private static void updateCameraParameters(android.hardware.Camera camera,
@@ -136,27 +134,22 @@ public class Camera1Session implements CameraSession {
     Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
 
     final CaptureFormat.FramerateRange fpsRange =
-        CameraEnumerationAndroid.getClosestSupportedFramerateRange(
-            supportedFramerates, framerate);
+        CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
     final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
-        Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()),
-        width, height);
+        Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
     return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
   }
 
-  private static Size findClosestPictureSize(android.hardware.Camera.Parameters parameters,
-      int width, int height) {
+  private static Size findClosestPictureSize(
+      android.hardware.Camera.Parameters parameters, int width, int height) {
     return CameraEnumerationAndroid.getClosestSupportedSize(
-        Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()),
-        width, height);
+        Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
   }
 
-  private Camera1Session(
-      Events events, boolean captureToTexture,
-      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
-      int cameraId, int width, int height, int framerate,
+  private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
+      SurfaceTextureHelper surfaceTextureHelper, int cameraId, int width, int height, int framerate,
       android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
       CaptureFormat captureFormat, long constructionTimeNs) {
     Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
@@ -186,8 +179,7 @@ public class Camera1Session implements CameraSession {
       final long stopStartTime = System.nanoTime();
       state = SessionState.STOPPED;
       stopInternal();
-      final int stopTimeMs =
-          (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+      final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
       camera1StopTimeMsHistogram.addSample(stopTimeMs);
     }
   }
@@ -312,8 +304,7 @@ public class Camera1Session implements CameraSession {
   private int getDeviceOrientation() {
     int orientation = 0;
 
-    WindowManager wm = (WindowManager) applicationContext.getSystemService(
-        Context.WINDOW_SERVICE);
+    WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
     switch (wm.getDefaultDisplay().getRotation()) {
       case Surface.ROTATION_90:
         orientation = 90;
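
A recurring micro-change in this commit, first visible in the SessionState hunk above: the trailing semicolon after an enum body is dropped, and short enum bodies are collapsed onto one line. The semicolon is legal but redundant Java, so the change is purely cosmetic. A minimal, self-contained illustration (the class name here is hypothetical, not from the commit):

// EnumStyleDemo.java - illustrates the enum reflow applied across this commit.
public class EnumStyleDemo {
  // Old style: multi-line body plus a redundant trailing ';' after the brace.
  // private static enum SessionState {
  //   RUNNING,
  //   STOPPED
  // };

  // New style after the formatting pass: one line, no trailing semicolon.
  private static enum SessionState { RUNNING, STOPPED }

  public static void main(String[] args) {
    System.out.println(SessionState.RUNNING); // prints RUNNING
  }
}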

File: Camera2Capturer.java

@@ -27,14 +27,11 @@ public class Camera2Capturer extends CameraCapturer {
   }
 
   @Override
-  protected void createCameraSession(
-      CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
-      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
-      String cameraName, int width, int height, int framerate) {
-    Camera2Session.create(
-        createSessionCallback, events,
-        applicationContext, cameraManager,
-        surfaceTextureHelper,
-        cameraName, width, height, framerate);
+  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+      CameraSession.Events events, Context applicationContext,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+      int framerate) {
+    Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
+        surfaceTextureHelper, cameraName, width, height, framerate);
   }
 }

File: Camera2Enumerator.java

@@ -63,8 +63,7 @@ public class Camera2Enumerator implements CameraEnumerator {
   @Override
   public boolean isFrontFacing(String deviceName) {
-    CameraCharacteristics characteristics
-        = getCameraCharacteristics(deviceName);
+    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
 
     return characteristics != null
         && characteristics.get(CameraCharacteristics.LENS_FACING)
@@ -73,8 +72,7 @@ public class Camera2Enumerator implements CameraEnumerator {
   @Override
   public boolean isBackFacing(String deviceName) {
-    CameraCharacteristics characteristics
-        = getCameraCharacteristics(deviceName);
+    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
 
     return characteristics != null
         && characteristics.get(CameraCharacteristics.LENS_FACING)
@@ -87,8 +85,8 @@ public class Camera2Enumerator implements CameraEnumerator {
   }
 
   @Override
-  public CameraVideoCapturer createCapturer(String deviceName,
-      CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
     return new Camera2Capturer(context, deviceName, eventsHandler);
   }
@@ -149,8 +147,7 @@ public class Camera2Enumerator implements CameraEnumerator {
     return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
   }
 
-  static List<Size> getSupportedSizes(
-      CameraCharacteristics cameraCharacteristics) {
+  static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
     final StreamConfigurationMap streamMap =
         cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
     final int supportLevel =
@@ -161,8 +158,8 @@ public class Camera2Enumerator implements CameraEnumerator {
     // Video may be stretched pre LMR1 on legacy implementations.
     // Filter out formats that have different aspect ratio than the sensor array.
-    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1 &&
-        supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
+        && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
       final Rect activeArraySize =
           cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
       final ArrayList<Size> filteredSizes = new ArrayList<Size>();
@@ -184,8 +181,7 @@ public class Camera2Enumerator implements CameraEnumerator {
         (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
   }
 
-  static List<CaptureFormat> getSupportedFormats(
-      CameraManager cameraManager, String cameraId) {
+  static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
     synchronized (cachedSupportedFormats) {
       if (cachedSupportedFormats.containsKey(cameraId)) {
         return cachedSupportedFormats.get(cameraId);
@@ -220,8 +216,8 @@ public class Camera2Enumerator implements CameraEnumerator {
       for (Size size : sizes) {
         long minFrameDurationNs = 0;
         try {
-          minFrameDurationNs = streamMap.getOutputMinFrameDuration(SurfaceTexture.class,
-              new android.util.Size(size.width, size.height));
+          minFrameDurationNs = streamMap.getOutputMinFrameDuration(
+              SurfaceTexture.class, new android.util.Size(size.width, size.height));
         } catch (Exception e) {
           // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
         }
@@ -255,8 +251,7 @@ public class Camera2Enumerator implements CameraEnumerator {
     final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
     for (Range<Integer> range : arrayRanges) {
       ranges.add(new CaptureFormat.FramerateRange(
-          range.getLower() * unitFactor,
-          range.getUpper() * unitFactor));
+          range.getLower() * unitFactor, range.getUpper() * unitFactor));
     }
     return ranges;
   }

File: Camera2Session.java

@@ -42,7 +42,7 @@ public class Camera2Session implements CameraSession {
   private static final Histogram camera2StopTimeMsHistogram =
       Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
 
-  private static enum SessionState { RUNNING, STOPPED };
+  private static enum SessionState { RUNNING, STOPPED }
 
   private final Handler cameraThreadHandler;
   private final CreateSessionCallback callback;
@@ -159,11 +159,11 @@ public class Camera2Session implements CameraSession {
       final CaptureRequest.Builder captureRequestBuilder =
           cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
       // Set auto exposure fps range.
-      captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(
-          captureFormat.framerate.min / fpsUnitFactor,
+      captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
+          new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
           captureFormat.framerate.max / fpsUnitFactor));
-      captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
-          CaptureRequest.CONTROL_AE_MODE_ON);
+      captureRequestBuilder.set(
+          CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
       captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
       chooseStabilizationMode(captureRequestBuilder);
       chooseFocusMode(captureRequestBuilder);
@@ -205,8 +205,8 @@ public class Camera2Session implements CameraSession {
               }
 
               // Undo camera orientation - we report it as rotation instead.
-              transformMatrix = RendererCommon.rotateTextureMatrix(
-                  transformMatrix, -cameraOrientation);
+              transformMatrix =
+                  RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
 
               events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
                   captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
@@ -250,12 +250,12 @@ public class Camera2Session implements CameraSession {
   }
 
   private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
-    final int[] availableFocusModes = cameraCharacteristics.get(
-        CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+    final int[] availableFocusModes =
+        cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
     for (int mode : availableFocusModes) {
       if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
-        captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
-            CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
+        captureRequestBuilder.set(
+            CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
         Logging.d(TAG, "Using continuous video auto-focus.");
         return;
       }
@@ -272,23 +272,17 @@ public class Camera2Session implements CameraSession {
     }
   }
 
-  public static void create(
-      CreateSessionCallback callback, Events events,
+  public static void create(CreateSessionCallback callback, Events events,
       Context applicationContext, CameraManager cameraManager,
-      SurfaceTextureHelper surfaceTextureHelper,
-      String cameraId, int width, int height, int framerate) {
-    new Camera2Session(
-        callback, events,
-        applicationContext, cameraManager,
-        surfaceTextureHelper,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
+      int framerate) {
+    new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
         cameraId, width, height, framerate);
   }
 
-  private Camera2Session(
-      CreateSessionCallback callback, Events events,
-      Context applicationContext, CameraManager cameraManager,
-      SurfaceTextureHelper surfaceTextureHelper,
-      String cameraId, int width, int height, int framerate) {
+  private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
+      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
+      int width, int height, int framerate) {
     Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
 
     constructionTimeNs = System.nanoTime();
@@ -341,11 +335,9 @@ public class Camera2Session implements CameraSession {
     }
 
     final CaptureFormat.FramerateRange bestFpsRange =
-        CameraEnumerationAndroid.getClosestSupportedFramerateRange(
-            framerateRanges, framerate);
-    final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(
-        sizes, width, height);
+        CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
+    final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
 
     captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
     Logging.d(TAG, "Using capture format: " + captureFormat);
@@ -372,8 +364,7 @@ public class Camera2Session implements CameraSession {
       final long stopStartTime = System.nanoTime();
       state = SessionState.STOPPED;
       stopInternal();
-      final int stopTimeMs =
-          (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+      final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
       camera2StopTimeMsHistogram.addSample(stopTimeMs);
     }
   }
@@ -417,8 +408,7 @@ public class Camera2Session implements CameraSession {
   private int getDeviceOrientation() {
     int orientation = 0;
 
-    WindowManager wm = (WindowManager) applicationContext.getSystemService(
-        Context.WINDOW_SERVICE);
+    WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
     switch (wm.getDefaultDisplay().getRotation()) {
      case Surface.ROTATION_90:
        orientation = 90;

File: CameraCapturer.java

@@ -50,8 +50,7 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
         if (switchState == SwitchState.IN_PROGRESS) {
           if (switchEventsHandler != null) {
-            switchEventsHandler.onCameraSwitchDone(
-                cameraEnumerator.isFrontFacing(cameraName));
+            switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
             switchEventsHandler = null;
           }
           switchState = SwitchState.IDLE;
@@ -133,8 +132,7 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
     @Override
     public void onByteBufferFrameCaptured(
-        CameraSession session, byte[] data, int width, int height, int rotation,
-        long timestamp) {
+        CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
       checkIsOnCameraThread();
       synchronized (stateLock) {
         if (session != currentSession) {
@@ -151,9 +149,8 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
     }
 
     @Override
-    public void onTextureFrameCaptured(
-        CameraSession session, int width, int height, int oesTextureId, float[] transformMatrix,
-        int rotation, long timestamp) {
+    public void onTextureFrameCaptured(CameraSession session, int width, int height,
+        int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {
       checkIsOnCameraThread();
       synchronized (stateLock) {
         if (session != currentSession) {
@@ -268,9 +265,8 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
     cameraThreadHandler.postDelayed(new Runnable() {
       @Override
       public void run() {
-        createCameraSession(
-            createSessionCallback, cameraSessionEventsHandler, applicationContext, surfaceHelper,
-            cameraName, width, height, framerate);
+        createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
+            surfaceHelper, cameraName, width, height, framerate);
       }
     }, delayMs);
   }
@@ -285,7 +281,6 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
       ThreadUtils.waitUninterruptibly(stateLock);
     }
 
-
     if (currentSession != null) {
       Logging.d(TAG, "Stop capture: Nulling session");
       cameraStatistics.release();
@@ -428,6 +423,6 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
   abstract protected void createCameraSession(
       CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
-      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
-      String cameraName, int width, int height, int framerate);
+      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
+      int width, int height, int framerate);
 }

File: CameraEnumerationAndroid.java

@@ -123,7 +123,6 @@ public class CameraEnumerationAndroid {
     return new Camera1Enumerator().getDeviceNames();
   }
 
-
   /**
    * @deprecated
    * Please use Camera1Enumerator.getDeviceNames().length instead.
@@ -177,8 +176,8 @@ public class CameraEnumerationAndroid {
   // lower bound, to allow the framerate to fluctuate based on lightning conditions.
   public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
       List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
-    return Collections.min(supportedFramerates,
-        new ClosestComparator<CaptureFormat.FramerateRange>() {
+    return Collections.min(
+        supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
           // Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
           // from requested.
           private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
@@ -192,15 +191,14 @@ public class CameraEnumerationAndroid {
           // Use one weight for small |value| less than |threshold|, and another weight above.
           private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
-            return (value < threshold)
-                ? value * lowWeight
+            return (value < threshold) ? value * lowWeight
                 : threshold * lowWeight + (value - threshold) * highWeight;
           }
 
           @Override
           int diff(CaptureFormat.FramerateRange range) {
-            final int minFpsError = progressivePenalty(range.min,
-                MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
+            final int minFpsError = progressivePenalty(
+                range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
             final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
                 MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
             return minFpsError + maxFpsError;
@@ -209,10 +207,8 @@ public class CameraEnumerationAndroid {
   }
 
   public static Size getClosestSupportedSize(
-      List<Size> supportedSizes, final int requestedWidth,
-      final int requestedHeight) {
-    return Collections.min(supportedSizes,
-        new ClosestComparator<Size>() {
+      List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+    return Collections.min(supportedSizes, new ClosestComparator<Size>() {
       @Override
       int diff(Size size) {
        return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
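
Since the reflowed comparator above is the logic exercised by the CameraEnumerationTest hunk at the top of this commit, a standalone sketch may help readers check the expected winner. The weights and the min-fps threshold below are illustrative placeholders, not WebRTC's constants; the diff only shows MAX_FPS_DIFF_THRESHOLD = 5000, and the other values are elided from this hunk:

import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

// FramerateSelectionSketch.java - NOT WebRTC code; a hand-rolled sketch of the
// progressive-penalty selection above, with made-up weights for illustration.
public class FramerateSelectionSketch {
  // Same shape as progressivePenalty(): cheap below the threshold, steeper above.
  static int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
    return (value < threshold) ? value * lowWeight
                               : threshold * lowWeight + (value - threshold) * highWeight;
  }

  public static void main(String[] args) {
    final int requestedFps = 15;
    // {min, max} ranges in fps * 1000, taken from the second test case.
    List<int[]> ranges = Arrays.asList(new int[] {0, 30000}, new int[] {10000, 20000},
        new int[] {14000, 16000}, new int[] {15000, 15000});
    int[] best = Collections.min(ranges, Comparator.comparingInt((int[] r) -> {
      int minFpsError = progressivePenalty(r[0], 8000, 1, 4); // illustrative weights
      int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - r[1]), 5000, 1, 3);
      return minFpsError + maxFpsError;
    }));
    // A low min bound plus a max near the request wins: prints "10000-20000",
    // matching the test's expected FramerateRange(10000, 20000).
    System.out.println(best[0] + "-" + best[1]);
  }
}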

File: CameraEnumerator.java

@@ -20,6 +20,6 @@ public interface CameraEnumerator {
   public boolean isBackFacing(String deviceName);
   public List<CaptureFormat> getSupportedFormats(String deviceName);
 
-  public CameraVideoCapturer createCapturer(String deviceName,
-      CameraVideoCapturer.CameraEventsHandler eventsHandler);
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
 }

File: CameraSession.java

@@ -22,8 +22,8 @@ public interface CameraSession {
     void onCameraOpening();
     void onCameraError(CameraSession session, String error);
     void onCameraClosed(CameraSession session);
-    void onByteBufferFrameCaptured(CameraSession session, byte[] data, int width, int height,
-        int rotation, long timestamp);
+    void onByteBufferFrameCaptured(
+        CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
     void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
         float[] transformMatrix, int rotation, long timestamp);
   }

File: DataChannel.java

@@ -29,9 +29,8 @@ public class DataChannel {
     public Init() {}
 
     // Called only by native code.
-    private Init(
-        boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
-        String protocol, boolean negotiated, int id) {
+    private Init(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits, String protocol,
+        boolean negotiated, int id) {
       this.ordered = ordered;
       this.maxRetransmitTimeMs = maxRetransmitTimeMs;
       this.maxRetransmits = maxRetransmits;
@@ -73,7 +72,7 @@ public class DataChannel {
   }
 
   /** Keep in sync with DataChannelInterface::DataState. */
-  public enum State { CONNECTING, OPEN, CLOSING, CLOSED };
+  public enum State { CONNECTING, OPEN, CLOSING, CLOSED }
 
   private final long nativeDataChannel;
   private long nativeObserver;

File: EglBase.java

@@ -15,15 +15,13 @@ import android.view.Surface;
 import javax.microedition.khronos.egl.EGL10;
 
 /**
  * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
  * and an EGLSurface.
  */
 public abstract class EglBase {
   // EGL wrapper for an actual EGLContext.
-  public static class Context {
-  }
+  public static class Context {}
 
   // According to the documentation, EGL can be used from multiple threads at the same time if each
   // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
@@ -39,6 +37,7 @@ public abstract class EglBase {
   // Android-specific extension.
   private static final int EGL_RECORDABLE_ANDROID = 0x3142;
 
+  // clang-format off
   public static final int[] CONFIG_PLAIN = {
     EGL10.EGL_RED_SIZE, 8,
     EGL10.EGL_GREEN_SIZE, 8,
@@ -79,6 +78,7 @@ public abstract class EglBase {
     EGL_RECORDABLE_ANDROID, 1,
     EGL10.EGL_NONE
   };
+  // clang-format on
 
   // Create a new context with the specified config attributes, sharing data with sharedContext.
   // |sharedContext| can be null.
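
The `// clang-format off` / `// clang-format on` lines added above are among the few non-whitespace additions in this commit, and they identify the formatter behind the whole pass: clang-format leaves everything between the markers untouched, which preserves the hand-aligned EGL attribute/value tables (GlRectDrawer's shader strings get the same guard below). A minimal, self-contained illustration of the guard, with hypothetical names and values:

// GuardDemo.java - illustrates the formatter guard; constants are arbitrary.
public class GuardDemo {
  // clang-format off
  // Attribute/value pairs aligned by hand; the formatter must not reflow them.
  public static final int[] CONFIG_PLAIN_LIKE = {
    0x3024, 8,  // red size
    0x3023, 8,  // green size
    0x3022, 8,  // blue size
  };
  // clang-format on

  public static void main(String[] args) {
    System.out.println("pairs: " + CONFIG_PLAIN_LIKE.length / 2); // prints: pairs: 3
  }
}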

File: EglBase10.java

@@ -159,9 +159,8 @@ public final class EglBase10 extends EglBase {
     int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
     eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
     if (eglSurface == EGL10.EGL_NO_SURFACE) {
-      throw new RuntimeException(
-          "Failed to create pixel buffer surface with size " + width + "x" + height
-          + ": 0x" + Integer.toHexString(egl.eglGetError()));
+      throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
+          + height + ": 0x" + Integer.toHexString(egl.eglGetError()));
     }
   }
@@ -272,8 +271,7 @@ public final class EglBase10 extends EglBase {
   private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
     EGLConfig[] configs = new EGLConfig[1];
     int[] numConfigs = new int[1];
-    if (!egl.eglChooseConfig(
-            eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+    if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
       throw new RuntimeException(
           "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
     }

File: EglBase14.java

@@ -37,8 +37,8 @@ public final class EglBase14 extends EglBase {
   // EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation
   // time stamp on a surface is supported from 18 so we require 18.
   public static boolean isEGL14Supported() {
-    Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
-        + ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
+    Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION + ". isEGL14Supported: "
+        + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
     return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
   }
@@ -101,9 +101,8 @@ public final class EglBase14 extends EglBase {
     int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
     eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
     if (eglSurface == EGL14.EGL_NO_SURFACE) {
-      throw new RuntimeException(
-          "Failed to create pixel buffer surface with size " + width + "x" + height
-          + ": 0x" + Integer.toHexString(EGL14.eglGetError()));
+      throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
+          + height + ": 0x" + Integer.toHexString(EGL14.eglGetError()));
     }
   }
@@ -202,7 +201,8 @@ public final class EglBase14 extends EglBase {
       throw new RuntimeException("No EGLSurface - can't swap buffers");
     }
     synchronized (EglBase.lock) {
-      // See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+      // See
+      // https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
       EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
       EGL14.eglSwapBuffers(eglDisplay, eglSurface);
     }

File: GlRectDrawer.java

@@ -25,6 +25,7 @@ import java.util.Map;
  * manually to free the resources held by this object.
  */
 public class GlRectDrawer implements RendererCommon.GlDrawer {
+  // clang-format off
   // Simple vertex shader, used for both YUV and OES.
   private static final String VERTEX_SHADER_STRING =
       "varying vec2 interp_tc;\n"
@@ -76,11 +77,11 @@ public class GlRectDrawer implements RendererCommon.GlDrawer {
       + "void main() {\n"
       + "  gl_FragColor = texture2D(oes_tex, interp_tc);\n"
       + "}\n";
+  // clang-format on
 
   // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
   // top-right.
-  private static final FloatBuffer FULL_RECTANGLE_BUF =
-      GlUtil.createFloatBuffer(new float[] {
+  private static final FloatBuffer FULL_RECTANGLE_BUF = GlUtil.createFloatBuffer(new float[] {
       -1.0f, -1.0f, // Bottom left.
       1.0f, -1.0f, // Bottom right.
       -1.0f, 1.0f, // Top left.
@@ -88,8 +89,7 @@ public class GlRectDrawer implements RendererCommon.GlDrawer {
   });
 
   // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
-  private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
-      GlUtil.createFloatBuffer(new float[] {
+  private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = GlUtil.createFloatBuffer(new float[] {
       0.0f, 0.0f, // Bottom left.
       1.0f, 0.0f, // Bottom right.
       0.0f, 1.0f, // Top left.

File: GlShader.java

@@ -25,13 +25,11 @@ public class GlShader {
     }
     GLES20.glShaderSource(shader, source);
     GLES20.glCompileShader(shader);
-    int[] compileStatus = new int[] {
-      GLES20.GL_FALSE
-    };
+    int[] compileStatus = new int[] {GLES20.GL_FALSE};
     GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
     if (compileStatus[0] != GLES20.GL_TRUE) {
-      Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
-          GLES20.glGetShaderInfoLog(shader));
+      Logging.e(
+          TAG, "Could not compile shader " + shaderType + ":" + GLES20.glGetShaderInfoLog(shader));
       throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
     }
     GlUtil.checkNoGLES2Error("compileShader");
@@ -50,13 +48,10 @@ public class GlShader {
     GLES20.glAttachShader(program, vertexShader);
     GLES20.glAttachShader(program, fragmentShader);
     GLES20.glLinkProgram(program);
-    int[] linkStatus = new int[] {
-      GLES20.GL_FALSE
-    };
+    int[] linkStatus = new int[] {GLES20.GL_FALSE};
     GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
     if (linkStatus[0] != GLES20.GL_TRUE) {
-      Logging.e(TAG, "Could not link program: " +
-          GLES20.glGetProgramInfoLog(program));
+      Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
       throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
     }
     // According to the documentation of glLinkProgram():

File: GlTextureFrameBuffer.java

@@ -52,8 +52,8 @@ public class GlTextureFrameBuffer {
     GlUtil.checkNoGLES2Error("Generate framebuffer");
 
     // Attach the texture to the framebuffer as color attachment.
-    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
-        GLES20.GL_TEXTURE_2D, textureId, 0);
+    GLES20.glFramebufferTexture2D(
+        GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
     GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
 
     // Restore normal framebuffer.

File: MediaCodecVideoDecoder.java

@ -42,11 +42,7 @@ public class MediaCodecVideoDecoder {
private static final long MAX_DECODE_TIME_MS = 200; private static final long MAX_DECODE_TIME_MS = 200;
// Tracks webrtc::VideoCodecType. // Tracks webrtc::VideoCodecType.
public enum VideoCodecType { public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
VIDEO_CODEC_VP8,
VIDEO_CODEC_VP9,
VIDEO_CODEC_H264
}
// Timeout for input buffer dequeue. // Timeout for input buffer dequeue.
private static final int DEQUEUE_INPUT_TIMEOUT = 500000; private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
@ -70,14 +66,13 @@ public class MediaCodecVideoDecoder {
private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9"; private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc"; private static final String H264_MIME_TYPE = "video/avc";
// List of supported HW VP8 decoders. // List of supported HW VP8 decoders.
private static final String[] supportedVp8HwCodecPrefixes = private static final String[] supportedVp8HwCodecPrefixes = {
{"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." }; "OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel."};
// List of supported HW VP9 decoders. // List of supported HW VP9 decoders.
private static final String[] supportedVp9HwCodecPrefixes = private static final String[] supportedVp9HwCodecPrefixes = {"OMX.qcom.", "OMX.Exynos."};
{"OMX.qcom.", "OMX.Exynos." };
// List of supported HW H.264 decoders. // List of supported HW H.264 decoders.
private static final String[] supportedH264HwCodecPrefixes = private static final String[] supportedH264HwCodecPrefixes = {
{"OMX.qcom.", "OMX.Intel.", "OMX.Exynos." }; "OMX.qcom.", "OMX.Intel.", "OMX.Exynos."};
// NV12 color format supported by QCOM codec, but not declared in MediaCodec - // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
@ -87,11 +82,9 @@ public class MediaCodecVideoDecoder {
private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04; private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Allowable color formats supported by codec - in order of preference. // Allowable color formats supported by codec - in order of preference.
private static final List<Integer> supportedColorList = Arrays.asList( private static final List<Integer> supportedColorList = Arrays.asList(
CodecCapabilities.COLOR_FormatYUV420Planar, CodecCapabilities.COLOR_FormatYUV420Planar, CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m); COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
@ -108,8 +101,8 @@ public class MediaCodecVideoDecoder {
private TextureListener textureListener; private TextureListener textureListener;
private int droppedFrames; private int droppedFrames;
private Surface surface = null; private Surface surface = null;
private final Queue<DecodedOutputBuffer> private final Queue<DecodedOutputBuffer> dequeuedSurfaceOutputBuffers =
dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>(); new LinkedList<DecodedOutputBuffer>();
// MediaCodec error handler - invoked when critical error happens which may prevent // MediaCodec error handler - invoked when critical error happens which may prevent
// further use of media codec API. Now it means that one of media codec instances // further use of media codec API. Now it means that one of media codec instances
@ -142,18 +135,18 @@ public class MediaCodecVideoDecoder {
// Functions to query if HW decoding is supported. // Functions to query if HW decoding is supported.
public static boolean isVp8HwSupported() { public static boolean isVp8HwSupported() {
return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) && return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE)
(findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null); && (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
} }
public static boolean isVp9HwSupported() { public static boolean isVp9HwSupported() {
return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) && return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE)
(findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null); && (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
} }
public static boolean isH264HwSupported() { public static boolean isH264HwSupported() {
return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) && return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE)
(findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null); && (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
} }
public static void printStackTrace() { public static void printStackTrace() {
@ -178,8 +171,7 @@ public class MediaCodecVideoDecoder {
public final int colorFormat; // Color format supported by codec. public final int colorFormat; // Color format supported by codec.
} }
private static DecoderProperties findDecoder( private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) {
String mime, String[] supportedCodecPrefixes) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null; // MediaCodec.setParameters is missing. return null; // MediaCodec.setParameters is missing.
} }
@ -233,8 +225,8 @@ public class MediaCodecVideoDecoder {
for (int codecColorFormat : capabilities.colorFormats) { for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) { if (codecColorFormat == supportedColorFormat) {
// Found supported HW decoder. // Found supported HW decoder.
Logging.d(TAG, "Found target decoder " + name + Logging.d(TAG, "Found target decoder " + name + ". Color: 0x"
". Color: 0x" + Integer.toHexString(codecColorFormat)); + Integer.toHexString(codecColorFormat));
return new DecoderProperties(name, codecColorFormat); return new DecoderProperties(name, codecColorFormat);
} }
} }
@ -246,16 +238,14 @@ public class MediaCodecVideoDecoder {
private void checkOnMediaCodecThread() throws IllegalStateException { private void checkOnMediaCodecThread() throws IllegalStateException {
if (mediaCodecThread.getId() != Thread.currentThread().getId()) { if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
throw new IllegalStateException( throw new IllegalStateException("MediaCodecVideoDecoder previously operated on "
"MediaCodecVideoDecoder previously operated on " + mediaCodecThread + + mediaCodecThread + " but is now called on " + Thread.currentThread());
" but is now called on " + Thread.currentThread());
} }
} }
// Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output. // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
private boolean initDecode( private boolean initDecode(
VideoCodecType type, int width, int height, VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodecThread != null) { if (mediaCodecThread != null) {
throw new RuntimeException("initDecode: Forgot to release()?"); throw new RuntimeException("initDecode: Forgot to release()?");
} }
@ -280,9 +270,8 @@ public class MediaCodecVideoDecoder {
throw new RuntimeException("Cannot find HW decoder for " + type); throw new RuntimeException("Cannot find HW decoder for " + type);
} }
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height + Logging.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height + ". Color: 0x"
". Color: 0x" + Integer.toHexString(properties.colorFormat) + + Integer.toHexString(properties.colorFormat) + ". Use Surface: " + useSurface);
". Use Surface: " + useSurface);
runningInstance = this; // Decoder is now running and can be queried for stack traces. runningInstance = this; // Decoder is now running and can be queried for stack traces.
mediaCodecThread = Thread.currentThread(); mediaCodecThread = Thread.currentThread();
@ -317,8 +306,8 @@ public class MediaCodecVideoDecoder {
hasDecodedFirstFrame = false; hasDecodedFirstFrame = false;
dequeuedSurfaceOutputBuffers.clear(); dequeuedSurfaceOutputBuffers.clear();
droppedFrames = 0; droppedFrames = 0;
Logging.d(TAG, "Input buffers: " + inputBuffers.length + Logging.d(TAG,
". Output buffers: " + outputBuffers.length); "Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
return true; return true;
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
Logging.e(TAG, "initDecode failed", e); Logging.e(TAG, "initDecode failed", e);
@ -406,12 +395,11 @@ public class MediaCodecVideoDecoder {
try { try {
inputBuffers[inputBufferIndex].position(0); inputBuffers[inputBufferIndex].position(0);
inputBuffers[inputBufferIndex].limit(size); inputBuffers[inputBufferIndex].limit(size);
decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, decodeStartTimeMs.add(
ntpTimeStamp)); new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, ntpTimeStamp));
mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStamUs, 0); mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStamUs, 0);
return true; return true;
} } catch (IllegalStateException e) {
catch (IllegalStateException e) {
Logging.e(TAG, "decode failed", e); Logging.e(TAG, "decode failed", e);
return false; return false;
} }
@ -511,8 +499,7 @@ public class MediaCodecVideoDecoder {
public void addBufferToRender(DecodedOutputBuffer buffer) { public void addBufferToRender(DecodedOutputBuffer buffer) {
if (bufferToRender != null) { if (bufferToRender != null) {
Logging.e(TAG, Logging.e(TAG, "Unexpected addBufferToRender() called while waiting for a texture.");
"Unexpected addBufferToRender() called while waiting for a texture.");
throw new IllegalStateException("Waiting for a texture."); throw new IllegalStateException("Waiting for a texture.");
} }
bufferToRender = buffer; bufferToRender = buffer;
@ -530,8 +517,8 @@ public class MediaCodecVideoDecoder {
int oesTextureId, float[] transformMatrix, long timestampNs) { int oesTextureId, float[] transformMatrix, long timestampNs) {
synchronized (newFrameLock) { synchronized (newFrameLock) {
if (renderedBuffer != null) { if (renderedBuffer != null) {
Logging.e(TAG, Logging.e(
"Unexpected onTextureFrameAvailable() called while already holding a texture."); TAG, "Unexpected onTextureFrameAvailable() called while already holding a texture.");
throw new IllegalStateException("Already holding a texture."); throw new IllegalStateException("Already holding a texture.");
} }
// |timestampNs| is always zero on some Android versions. // |timestampNs| is always zero on some Android versions.
@ -588,8 +575,8 @@ public class MediaCodecVideoDecoder {
// MediaCodec.INFO_TRY_AGAIN_LATER. // MediaCodec.INFO_TRY_AGAIN_LATER.
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) { while (true) {
final int result = mediaCodec.dequeueOutputBuffer( final int result =
info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs)); mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
switch (result) { switch (result) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = mediaCodec.getOutputBuffers(); outputBuffers = mediaCodec.getOutputBuffers();
@ -604,8 +591,8 @@ public class MediaCodecVideoDecoder {
int new_width = format.getInteger(MediaFormat.KEY_WIDTH); int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
int new_height = format.getInteger(MediaFormat.KEY_HEIGHT); int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
if (hasDecodedFirstFrame && (new_width != width || new_height != height)) { if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
throw new RuntimeException("Unexpected size change. Configured " + width + "*" + throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height
height + ". New " + new_width + "*" + new_height); + ". New " + new_width + "*" + new_height);
} }
width = format.getInteger(MediaFormat.KEY_WIDTH); width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT); height = format.getInteger(MediaFormat.KEY_HEIGHT);
@ -639,14 +626,9 @@ public class MediaCodecVideoDecoder {
+ ". Might be caused by resuming H264 decoding after a pause."); + ". Might be caused by resuming H264 decoding after a pause.");
decodeTimeMs = MAX_DECODE_TIME_MS; decodeTimeMs = MAX_DECODE_TIME_MS;
} }
return new DecodedOutputBuffer(result, return new DecodedOutputBuffer(result, info.offset, info.size,
info.offset, TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs,
info.size, timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime());
TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs),
timeStamps.timeStampMs,
timeStamps.ntpTimeStampMs,
decodeTimeMs,
SystemClock.elapsedRealtime());
} }
} }
} }
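The hunk above is the standard MediaCodec output-polling loop. A minimal, self-contained sketch of that pattern follows; the class and method names are illustrative, and only the android.media.MediaCodec calls are real API.

import android.media.MediaCodec;
import java.util.concurrent.TimeUnit;

class OutputPoller {
  // Returns the index of a filled output buffer, or -1 on timeout.
  static int dequeueOutput(MediaCodec mediaCodec, int dequeueTimeoutMs) {
    final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while (true) {
      final int result =
          mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
      switch (result) {
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
          // Buffer set was re-allocated; the caller must refresh its cached
          // array via getOutputBuffers(), then poll again.
          break;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
          // Width/height/color format can change mid-stream; the decoder
          // above rejects a size change after the first decoded frame.
          break;
        case MediaCodec.INFO_TRY_AGAIN_LATER:
          return -1; // Nothing ready within the timeout.
        default:
          return result; // Index of a filled output buffer.
      }
    }
  }
}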
@ -686,18 +668,17 @@ public class MediaCodecVideoDecoder {
// TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
// return the one and only texture even if it does not render. // return the one and only texture even if it does not render.
Logging.w(TAG, "Draining decoder. Dropping frame with TS: " Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
+ droppedFrame.presentationTimeStampMs + + droppedFrame.presentationTimeStampMs + ". Total number of dropped frames: "
". Total number of dropped frames: " + droppedFrames); + droppedFrames);
} else { } else {
Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() + Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size()
". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs + + ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs
". Total number of dropped frames: " + droppedFrames); + ". Total number of dropped frames: " + droppedFrames);
} }
mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */); mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
return new DecodedTextureBuffer(0, null, return new DecodedTextureBuffer(0, null, droppedFrame.presentationTimeStampMs,
droppedFrame.presentationTimeStampMs, droppedFrame.timeStampMs, droppedFrame.timeStampMs, droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs); SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
} }
return null; return null;

View File

@ -42,11 +42,7 @@ public class MediaCodecVideoEncoder {
private static final String TAG = "MediaCodecVideoEncoder"; private static final String TAG = "MediaCodecVideoEncoder";
// Tracks webrtc::VideoCodecType. // Tracks webrtc::VideoCodecType.
public enum VideoCodecType { public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
VIDEO_CODEC_VP8,
VIDEO_CODEC_VP9,
VIDEO_CODEC_H264
}
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing. private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait. private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
@ -115,54 +111,43 @@ public class MediaCodecVideoEncoder {
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT); "OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties exynosVp8HwProperties = new MediaCodecProperties( private static final MediaCodecProperties exynosVp8HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.DYNAMIC_ADJUSTMENT); "OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.DYNAMIC_ADJUSTMENT);
private static final MediaCodecProperties[] vp8HwList = new MediaCodecProperties[] { private static final MediaCodecProperties[] vp8HwList =
qcomVp8HwProperties, exynosVp8HwProperties new MediaCodecProperties[] {qcomVp8HwProperties, exynosVp8HwProperties};
};
// List of supported HW VP9 encoders. // List of supported HW VP9 encoders.
private static final MediaCodecProperties qcomVp9HwProperties = new MediaCodecProperties( private static final MediaCodecProperties qcomVp9HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.M, BitrateAdjustmentType.NO_ADJUSTMENT); "OMX.qcom.", Build.VERSION_CODES.M, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties exynosVp9HwProperties = new MediaCodecProperties( private static final MediaCodecProperties exynosVp9HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.NO_ADJUSTMENT); "OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties[] vp9HwList = new MediaCodecProperties[] { private static final MediaCodecProperties[] vp9HwList =
qcomVp9HwProperties, exynosVp9HwProperties new MediaCodecProperties[] {qcomVp9HwProperties, exynosVp9HwProperties};
};
// List of supported HW H.264 encoders. // List of supported HW H.264 encoders.
private static final MediaCodecProperties qcomH264HwProperties = new MediaCodecProperties( private static final MediaCodecProperties qcomH264HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT); "OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
private static final MediaCodecProperties exynosH264HwProperties = new MediaCodecProperties( private static final MediaCodecProperties exynosH264HwProperties = new MediaCodecProperties(
"OMX.Exynos.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT); "OMX.Exynos.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT);
private static final MediaCodecProperties[] h264HwList = new MediaCodecProperties[] { private static final MediaCodecProperties[] h264HwList =
qcomH264HwProperties, exynosH264HwProperties new MediaCodecProperties[] {qcomH264HwProperties, exynosH264HwProperties};
};
// List of devices with poor H.264 encoder quality. // List of devices with poor H.264 encoder quality.
private static final String[] H264_HW_EXCEPTION_MODELS = new String[] {
// HW H.264 encoder on the devices below has poor bitrate control - actual // HW H.264 encoder on the devices below has poor bitrate control - actual
// bitrates deviate a lot from the target value. // bitrates deviate a lot from the target value.
"SAMSUNG-SGH-I337", private static final String[] H264_HW_EXCEPTION_MODELS =
"Nexus 7", new String[] {"SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4"};
"Nexus 4"
};
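For context on how an exception list like this is consulted: on a model match the encoder search bails out and software encoding takes over. A hedged sketch, assuming the check sits in findHwEncoder() as the surrounding code suggests:

import android.os.Build;
import java.util.Arrays;

final class H264BlacklistCheck {
  // Returns true when the current device's HW H.264 encoder should be
  // skipped; exceptionModels is the H264_HW_EXCEPTION_MODELS array above.
  static boolean hasBlacklistedH264Encoder(String[] exceptionModels) {
    return Arrays.asList(exceptionModels).contains(Build.MODEL);
  }
}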
// Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
// in OMX_Video.h // in OMX_Video.h
private static final int VIDEO_ControlRateConstant = 2; private static final int VIDEO_ControlRateConstant = 2;
// NV12 color format supported by QCOM codec, but not declared in MediaCodec - // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
private static final int private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Allowable color formats supported by codec - in order of preference. // Allowable color formats supported by codec - in order of preference.
private static final int[] supportedColorList = { private static final int[] supportedColorList = {CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar, CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
}; private static final int[] supportedSurfaceColorList = {CodecCapabilities.COLOR_FormatSurface};
private static final int[] supportedSurfaceColorList = {
CodecCapabilities.COLOR_FormatSurface
};
private VideoCodecType type; private VideoCodecType type;
private int colorFormat; // Used by native code. private int colorFormat; // Used by native code.
@ -209,33 +194,33 @@ public class MediaCodecVideoEncoder {
// Functions to query if HW encoding is supported. // Functions to query if HW encoding is supported.
public static boolean isVp8HwSupported() { public static boolean isVp8HwSupported() {
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) && return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
(findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedColorList) != null); && (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedColorList) != null);
} }
public static boolean isVp9HwSupported() { public static boolean isVp9HwSupported() {
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) && return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
(findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null); && (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
} }
public static boolean isH264HwSupported() { public static boolean isH264HwSupported() {
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) && return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
(findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null); && (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
} }
public static boolean isVp8HwSupportedUsingTextures() { public static boolean isVp8HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) && return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
(findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedSurfaceColorList) != null); && (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedSurfaceColorList) != null);
} }
public static boolean isVp9HwSupportedUsingTextures() { public static boolean isVp9HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) && return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
(findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null); && (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
} }
public static boolean isH264HwSupportedUsingTextures() { public static boolean isH264HwSupportedUsingTextures() {
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) && return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
(findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null); && (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
} }
// Helper struct for findHwEncoder() below. // Helper struct for findHwEncoder() below.
@ -296,14 +281,14 @@ public class MediaCodecVideoEncoder {
for (MediaCodecProperties codecProperties : supportedHwCodecProperties) { for (MediaCodecProperties codecProperties : supportedHwCodecProperties) {
if (name.startsWith(codecProperties.codecPrefix)) { if (name.startsWith(codecProperties.codecPrefix)) {
if (Build.VERSION.SDK_INT < codecProperties.minSdk) { if (Build.VERSION.SDK_INT < codecProperties.minSdk) {
Logging.w(TAG, "Codec " + name + " is disabled due to SDK version " + Logging.w(
Build.VERSION.SDK_INT); TAG, "Codec " + name + " is disabled due to SDK version " + Build.VERSION.SDK_INT);
continue; continue;
} }
if (codecProperties.bitrateAdjustmentType != BitrateAdjustmentType.NO_ADJUSTMENT) { if (codecProperties.bitrateAdjustmentType != BitrateAdjustmentType.NO_ADJUSTMENT) {
bitrateAdjustmentType = codecProperties.bitrateAdjustmentType; bitrateAdjustmentType = codecProperties.bitrateAdjustmentType;
Logging.w(TAG, "Codec " + name Logging.w(
+ " requires bitrate adjustment: " + bitrateAdjustmentType); TAG, "Codec " + name + " requires bitrate adjustment: " + bitrateAdjustmentType);
} }
supportedCodec = true; supportedCodec = true;
break; break;
@ -329,9 +314,9 @@ public class MediaCodecVideoEncoder {
for (int codecColorFormat : capabilities.colorFormats) { for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) { if (codecColorFormat == supportedColorFormat) {
// Found supported HW encoder. // Found supported HW encoder.
Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name + ". Color: 0x"
+ ". Color: 0x" + Integer.toHexString(codecColorFormat) + Integer.toHexString(codecColorFormat) + ". Bitrate adjustment: "
+ ". Bitrate adjustment: " + bitrateAdjustmentType); + bitrateAdjustmentType);
return new EncoderProperties(name, codecColorFormat, bitrateAdjustmentType); return new EncoderProperties(name, codecColorFormat, bitrateAdjustmentType);
} }
} }
@ -342,9 +327,8 @@ public class MediaCodecVideoEncoder {
private void checkOnMediaCodecThread() { private void checkOnMediaCodecThread() {
if (mediaCodecThread.getId() != Thread.currentThread().getId()) { if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
throw new RuntimeException( throw new RuntimeException("MediaCodecVideoEncoder previously operated on " + mediaCodecThread
"MediaCodecVideoEncoder previously operated on " + mediaCodecThread + + " but is now called on " + Thread.currentThread());
" but is now called on " + Thread.currentThread());
} }
} }
@ -373,8 +357,8 @@ public class MediaCodecVideoEncoder {
boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps, boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
EglBase14.Context sharedContext) { EglBase14.Context sharedContext) {
final boolean useSurface = sharedContext != null; final boolean useSurface = sharedContext != null;
Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height + Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height + ". @ " + kbps
". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface); + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
this.width = width; this.width = width;
this.height = height; this.height = height;
@ -411,9 +395,8 @@ public class MediaCodecVideoEncoder {
} else { } else {
fps = Math.min(fps, MAXIMUM_INITIAL_FPS); fps = Math.min(fps, MAXIMUM_INITIAL_FPS);
} }
Logging.d(TAG, "Color format: " + colorFormat + Logging.d(TAG, "Color format: " + colorFormat + ". Bitrate adjustment: " + bitrateAdjustmentType
". Bitrate adjustment: " + bitrateAdjustmentType + + ". Initial fps: " + fps);
". Initial fps: " + fps);
targetBitrateBps = 1000 * kbps; targetBitrateBps = 1000 * kbps;
targetFps = fps; targetFps = fps;
bitrateAccumulatorMax = targetBitrateBps / 8.0; bitrateAccumulatorMax = targetBitrateBps / 8.0;
@ -436,8 +419,7 @@ public class MediaCodecVideoEncoder {
Logging.e(TAG, "Can not create media encoder"); Logging.e(TAG, "Can not create media encoder");
return false; return false;
} }
mediaCodec.configure( mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (useSurface) { if (useSurface) {
eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE); eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
@ -464,8 +446,7 @@ public class MediaCodecVideoEncoder {
} }
boolean encodeBuffer( boolean encodeBuffer(
boolean isKeyframe, int inputBuffer, int size, boolean isKeyframe, int inputBuffer, int size, long presentationTimestampUs) {
long presentationTimestampUs) {
checkOnMediaCodecThread(); checkOnMediaCodecThread();
try { try {
if (isKeyframe) { if (isKeyframe) {
@ -478,11 +459,9 @@ public class MediaCodecVideoEncoder {
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0); b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
mediaCodec.setParameters(b); mediaCodec.setParameters(b);
} }
mediaCodec.queueInputBuffer( mediaCodec.queueInputBuffer(inputBuffer, 0, size, presentationTimestampUs, 0);
inputBuffer, 0, size, presentationTimestampUs, 0);
return true; return true;
} } catch (IllegalStateException e) {
catch (IllegalStateException e) {
Logging.e(TAG, "encodeBuffer failed", e); Logging.e(TAG, "encodeBuffer failed", e);
return false; return false;
} }
@ -505,8 +484,7 @@ public class MediaCodecVideoEncoder {
drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height); drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs)); eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
return true; return true;
} } catch (RuntimeException e) {
catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e); Logging.e(TAG, "encodeTexture failed", e);
return false; return false;
} }
@ -580,11 +558,11 @@ public class MediaCodecVideoEncoder {
// Adjust actual encoder bitrate based on bitrate adjustment type. // Adjust actual encoder bitrate based on bitrate adjustment type.
if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT && targetFps > 0) { if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT && targetFps > 0) {
codecBitrateBps = BITRATE_ADJUSTMENT_FPS * targetBitrateBps / targetFps; codecBitrateBps = BITRATE_ADJUSTMENT_FPS * targetBitrateBps / targetFps;
Logging.v(TAG, "setRates: " + kbps + " -> " + (codecBitrateBps / 1000) Logging.v(TAG,
+ " kbps. Fps: " + targetFps); "setRates: " + kbps + " -> " + (codecBitrateBps / 1000) + " kbps. Fps: " + targetFps);
} else if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) { } else if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps + ". ExpScale: "
+ ". ExpScale: " + bitrateAdjustmentScaleExp); + bitrateAdjustmentScaleExp);
if (bitrateAdjustmentScaleExp != 0) { if (bitrateAdjustmentScaleExp != 0) {
codecBitrateBps = (int) (codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp)); codecBitrateBps = (int) (codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp));
} }
@ -618,8 +596,7 @@ public class MediaCodecVideoEncoder {
// Helper struct for dequeueOutputBuffer() below. // Helper struct for dequeueOutputBuffer() below.
static class OutputBufferInfo { static class OutputBufferInfo {
public OutputBufferInfo( public OutputBufferInfo(
int index, ByteBuffer buffer, int index, ByteBuffer buffer, boolean isKeyFrame, long presentationTimestampUs) {
boolean isKeyFrame, long presentationTimestampUs) {
this.index = index; this.index = index;
this.buffer = buffer; this.buffer = buffer;
this.isKeyFrame = isKeyFrame; this.isKeyFrame = isKeyFrame;
@ -641,11 +618,9 @@ public class MediaCodecVideoEncoder {
int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT); int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
// Check if this is config frame and save configuration data. // Check if this is config frame and save configuration data.
if (result >= 0) { if (result >= 0) {
boolean isConfigFrame = boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
(info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
if (isConfigFrame) { if (isConfigFrame) {
Logging.d(TAG, "Config frame generated. Offset: " + info.offset + Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
". Size: " + info.size);
configData = ByteBuffer.allocateDirect(info.size); configData = ByteBuffer.allocateDirect(info.size);
outputBuffers[result].position(info.offset); outputBuffers[result].position(info.offset);
outputBuffers[result].limit(info.offset + info.size); outputBuffers[result].limit(info.offset + info.size);
@ -666,27 +641,23 @@ public class MediaCodecVideoEncoder {
reportEncodedFrame(info.size); reportEncodedFrame(info.size);
// Check key frame flag. // Check key frame flag.
boolean isKeyFrame = boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
(info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (isKeyFrame) { if (isKeyFrame) {
Logging.d(TAG, "Sync frame generated"); Logging.d(TAG, "Sync frame generated");
} }
if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) { if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
Logging.d(TAG, "Appending config frame of size " + configData.capacity() + Logging.d(TAG, "Appending config frame of size " + configData.capacity()
" to output buffer with offset " + info.offset + ", size " + + " to output buffer with offset " + info.offset + ", size " + info.size);
info.size);
// For H.264 key frame append SPS and PPS NALs at the start // For H.264 key frame append SPS and PPS NALs at the start
ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect( ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size);
configData.capacity() + info.size);
configData.rewind(); configData.rewind();
keyFrameBuffer.put(configData); keyFrameBuffer.put(configData);
keyFrameBuffer.put(outputBuffer); keyFrameBuffer.put(outputBuffer);
keyFrameBuffer.position(0); keyFrameBuffer.position(0);
return new OutputBufferInfo(result, keyFrameBuffer, return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs);
isKeyFrame, info.presentationTimeUs);
} else { } else {
return new OutputBufferInfo(result, outputBuffer.slice(), return new OutputBufferInfo(
isKeyFrame, info.presentationTimeUs); result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
} }
} else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mediaCodec.getOutputBuffers(); outputBuffers = mediaCodec.getOutputBuffers();
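The keyframe path above prepends the cached codec-config buffer because an H.264 decoder joining mid-stream needs the SPS/PPS NALs in front of the IDR frame. The same copy sequence, isolated as a sketch with illustrative names:

import java.nio.ByteBuffer;

final class KeyFrameAssembler {
  // Prepends cached SPS/PPS config data to a keyframe payload, mirroring the
  // dequeueOutputBuffer() branch above. keyFrame must hold exactly frameSize
  // remaining bytes.
  static ByteBuffer withConfig(ByteBuffer configData, ByteBuffer keyFrame, int frameSize) {
    ByteBuffer out = ByteBuffer.allocateDirect(configData.capacity() + frameSize);
    configData.rewind();
    out.put(configData);
    out.put(keyFrame);
    out.position(0);
    return out;
  }
}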
@ -727,8 +698,7 @@ public class MediaCodecVideoEncoder {
// Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much // Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
// from the target value. // from the target value.
if (bitrateObservationTimeMs > 1000 * BITRATE_CORRECTION_SEC) { if (bitrateObservationTimeMs > 1000 * BITRATE_CORRECTION_SEC) {
Logging.d(TAG, "Acc: " + (int)bitrateAccumulator Logging.d(TAG, "Acc: " + (int) bitrateAccumulator + ". Max: " + (int) bitrateAccumulatorMax
+ ". Max: " + (int)bitrateAccumulatorMax
+ ". ExpScale: " + bitrateAdjustmentScaleExp); + ". ExpScale: " + bitrateAdjustmentScaleExp);
boolean bitrateAdjustmentScaleChanged = false; boolean bitrateAdjustmentScaleChanged = false;
if (bitrateAccumulator > bitrateAccumulatorMax) { if (bitrateAccumulator > bitrateAccumulatorMax) {
@ -745,8 +715,8 @@ public class MediaCodecVideoEncoder {
if (bitrateAdjustmentScaleChanged) { if (bitrateAdjustmentScaleChanged) {
bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_CORRECTION_STEPS); bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_CORRECTION_STEPS);
bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_CORRECTION_STEPS); bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_CORRECTION_STEPS);
Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp + ". Value: "
+ ". Value: " + getBitrateScale(bitrateAdjustmentScaleExp)); + getBitrateScale(bitrateAdjustmentScaleExp));
setRates(targetBitrateBps / 1000, targetFps); setRates(targetBitrateBps / 1000, targetFps);
} }
bitrateObservationTimeMs = 0; bitrateObservationTimeMs = 0;
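The DYNAMIC_ADJUSTMENT branch above scales the codec bitrate by getBitrateScale(bitrateAdjustmentScaleExp), whose body lies outside this hunk. Since the exponent is clamped to plus or minus BITRATE_CORRECTION_STEPS, a plausible reading is an exponential scale; both constants below are assumptions, not taken from this diff:

final class BitrateScaleSketch {
  static final int BITRATE_CORRECTION_STEPS = 10; // assumed value
  static final double BITRATE_CORRECTION_MAX_SCALE = 4.0; // assumed value

  // Maps the clamped exponent to a factor in [1 / MAX_SCALE, MAX_SCALE].
  static double getBitrateScale(int bitrateAdjustmentScaleExp) {
    return Math.pow(BITRATE_CORRECTION_MAX_SCALE,
        (double) bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
  }
}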

View File

@ -78,7 +78,7 @@ public class MediaConstraints {
} }
public String toString() { public String toString() {
return "mandatory: " + stringifyKeyValuePairList(mandatory) + return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
", optional: " + stringifyKeyValuePairList(optional); + stringifyKeyValuePairList(optional);
} }
} }

View File

@ -8,15 +8,12 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
package org.webrtc; package org.webrtc;
/** Java wrapper for a C++ MediaSourceInterface. */ /** Java wrapper for a C++ MediaSourceInterface. */
public class MediaSource { public class MediaSource {
/** Tracks MediaSourceInterface.SourceState */ /** Tracks MediaSourceInterface.SourceState */
public enum State { public enum State { INITIALIZING, LIVE, ENDED, MUTED }
INITIALIZING, LIVE, ENDED, MUTED
}
final long nativeSource; // Package-protected for PeerConnectionFactory. final long nativeSource; // Package-protected for PeerConnectionFactory.

View File

@ -89,21 +89,16 @@ public class MediaStream {
} }
public String toString() { public String toString() {
return "[" + label() + ":A=" + audioTracks.size() + return "[" + label() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]";
":V=" + videoTracks.size() + "]";
} }
private static native boolean nativeAddAudioTrack( private static native boolean nativeAddAudioTrack(long nativeStream, long nativeAudioTrack);
long nativeStream, long nativeAudioTrack);
private static native boolean nativeAddVideoTrack( private static native boolean nativeAddVideoTrack(long nativeStream, long nativeVideoTrack);
long nativeStream, long nativeVideoTrack);
private static native boolean nativeRemoveAudioTrack( private static native boolean nativeRemoveAudioTrack(long nativeStream, long nativeAudioTrack);
long nativeStream, long nativeAudioTrack);
private static native boolean nativeRemoveVideoTrack( private static native boolean nativeRemoveVideoTrack(long nativeStream, long nativeVideoTrack);
long nativeStream, long nativeVideoTrack);
private static native String nativeLabel(long nativeStream); private static native String nativeLabel(long nativeStream);

View File

@ -51,8 +51,7 @@ public class MediaStreamTrack {
private static native boolean nativeEnabled(long nativeTrack); private static native boolean nativeEnabled(long nativeTrack);
private static native boolean nativeSetEnabled( private static native boolean nativeSetEnabled(long nativeTrack, boolean enabled);
long nativeTrack, boolean enabled);
private static native State nativeState(long nativeTrack); private static native State nativeState(long nativeTrack);

View File

@ -140,8 +140,7 @@ public class NetworkMonitor {
return; return;
} }
if (autoDetector == null) { if (autoDetector == null) {
autoDetector = new NetworkMonitorAutoDetect( autoDetector = new NetworkMonitorAutoDetect(new NetworkMonitorAutoDetect.Observer() {
new NetworkMonitorAutoDetect.Observer() {
@Override @Override
public void onConnectionTypeChanged(ConnectionType newConnectionType) { public void onConnectionTypeChanged(ConnectionType newConnectionType) {
@ -157,8 +156,7 @@ public class NetworkMonitor {
public void onNetworkDisconnect(long networkHandle) { public void onNetworkDisconnect(long networkHandle) {
notifyObserversOfNetworkDisconnect(networkHandle); notifyObserversOfNetworkDisconnect(networkHandle);
} }
}, }, applicationContext);
applicationContext);
final NetworkMonitorAutoDetect.NetworkState networkState = final NetworkMonitorAutoDetect.NetworkState networkState =
autoDetector.getCurrentNetworkState(); autoDetector.getCurrentNetworkState();
updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState)); updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
@ -241,8 +239,8 @@ public class NetworkMonitor {
private native void nativeNotifyConnectionTypeChanged(long nativePtr); private native void nativeNotifyConnectionTypeChanged(long nativePtr);
private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo); private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
private native void nativeNotifyOfNetworkDisconnect(long nativePtr, long networkHandle); private native void nativeNotifyOfNetworkDisconnect(long nativePtr, long networkHandle);
private native void nativeNotifyOfActiveNetworkList(long nativePtr, private native void nativeNotifyOfActiveNetworkList(
NetworkInformation[] networkInfos); long nativePtr, NetworkInformation[] networkInfos);
// For testing only. // For testing only.
static void resetInstanceForTests(Context context) { static void resetInstanceForTests(Context context) {

View File

@ -69,8 +69,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
public final ConnectionType type; public final ConnectionType type;
public final long handle; public final long handle;
public final IPAddress[] ipAddresses; public final IPAddress[] ipAddresses;
public NetworkInformation(String name, ConnectionType type, long handle, public NetworkInformation(
IPAddress[] addresses) { String name, ConnectionType type, long handle, IPAddress[] addresses) {
this.name = name; this.name = name;
this.type = type; this.type = type;
this.handle = handle; this.handle = handle;
@ -112,7 +112,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/ */
@SuppressLint("NewApi") @SuppressLint("NewApi")
private class SimpleNetworkCallback extends NetworkCallback { private class SimpleNetworkCallback extends NetworkCallback {
@Override @Override
public void onAvailable(Network network) { public void onAvailable(Network network) {
Logging.d(TAG, "Network becomes available: " + network.toString()); Logging.d(TAG, "Network becomes available: " + network.toString());
@ -120,8 +119,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
} }
@Override @Override
public void onCapabilitiesChanged( public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) {
Network network, NetworkCapabilities networkCapabilities) {
// A capabilities change may indicate the ConnectionType has changed, // A capabilities change may indicate the ConnectionType has changed,
// so forward the new NetworkInformation along to the observer. // so forward the new NetworkInformation along to the observer.
Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString()); Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
@ -140,8 +138,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
public void onLosing(Network network, int maxMsToLive) { public void onLosing(Network network, int maxMsToLive) {
// Signals that the network is about to be lost within maxMsToLive milliseconds. // Signals that the network is about to be lost within maxMsToLive milliseconds.
// We may use this signal later. // We may use this signal later.
Logging.d(TAG, Logging.d(
"Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms"); TAG, "Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
} }
@Override @Override
@ -303,15 +301,13 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
if (connectionType == ConnectionType.CONNECTION_UNKNOWN if (connectionType == ConnectionType.CONNECTION_UNKNOWN
|| connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) { || connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
+ " because it has type " + networkState.getNetworkType() + " because it has type " + networkState.getNetworkType() + " and subtype "
+ " and subtype " + networkState.getNetworkSubType()); + networkState.getNetworkSubType());
} }
NetworkInformation networkInformation = new NetworkInformation( NetworkInformation networkInformation =
linkProperties.getInterfaceName(), new NetworkInformation(linkProperties.getInterfaceName(), connectionType,
connectionType, networkToNetId(network), getIPAddresses(linkProperties));
networkToNetId(network),
getIPAddresses(linkProperties));
return networkInformation; return networkInformation;
} }
@ -324,8 +320,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
if (connectivityManager == null) { if (connectivityManager == null) {
return false; return false;
} }
final NetworkCapabilities capabilities = final NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network);
connectivityManager.getNetworkCapabilities(network);
return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET); return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
} }
@ -369,7 +364,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
} }
} }
/** Queries the WifiManager for SSID of the current Wifi connection. */ /** Queries the WifiManager for SSID of the current Wifi connection. */
static class WifiManagerDelegate { static class WifiManagerDelegate {
private final Context context; private final Context context;
@ -384,8 +378,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
} }
String getWifiSSID() { String getWifiSSID() {
final Intent intent = context.registerReceiver(null, final Intent intent = context.registerReceiver(
new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION)); null, new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
if (intent != null) { if (intent != null) {
final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO); final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
if (wifiInfo != null) { if (wifiInfo != null) {
@ -397,7 +391,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
} }
return ""; return "";
} }
} }
static final long INVALID_NET_ID = -1; static final long INVALID_NET_ID = -1;
@ -507,7 +500,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
* Registers a BroadcastReceiver in the given context. * Registers a BroadcastReceiver in the given context.
*/ */
private void registerReceiver() { private void registerReceiver() {
if (isRegistered) return; if (isRegistered)
return;
isRegistered = true; isRegistered = true;
context.registerReceiver(this, intentFilter); context.registerReceiver(this, intentFilter);
@ -517,7 +511,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
* Unregisters the BroadcastReceiver in the given context. * Unregisters the BroadcastReceiver in the given context.
*/ */
private void unregisterReceiver() { private void unregisterReceiver() {
if (!isRegistered) return; if (!isRegistered)
return;
isRegistered = false; isRegistered = false;
context.unregisterReceiver(this); context.unregisterReceiver(this);
@ -581,7 +576,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
} }
private String getWifiSSID(NetworkState networkState) { private String getWifiSSID(NetworkState networkState) {
if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI) return ""; if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI)
return "";
return wifiManagerDelegate.getWifiSSID(); return wifiManagerDelegate.getWifiSSID();
} }
@ -597,7 +593,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
private void connectionTypeChanged(NetworkState networkState) { private void connectionTypeChanged(NetworkState networkState) {
ConnectionType newConnectionType = getConnectionType(networkState); ConnectionType newConnectionType = getConnectionType(networkState);
String newWifiSSID = getWifiSSID(networkState); String newWifiSSID = getWifiSSID(networkState);
if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return; if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID))
return;
connectionType = newConnectionType; connectionType = newConnectionType;
wifiSSID = newWifiSSID; wifiSSID = newWifiSSID;

View File

@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
package org.webrtc; package org.webrtc;
import java.util.Collections; import java.util.Collections;
@ -27,19 +26,28 @@ public class PeerConnection {
} }
/** Tracks PeerConnectionInterface::IceGatheringState */ /** Tracks PeerConnectionInterface::IceGatheringState */
public enum IceGatheringState { NEW, GATHERING, COMPLETE }; public enum IceGatheringState { NEW, GATHERING, COMPLETE }
/** Tracks PeerConnectionInterface::IceConnectionState */ /** Tracks PeerConnectionInterface::IceConnectionState */
public enum IceConnectionState { public enum IceConnectionState {
NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED NEW,
}; CHECKING,
CONNECTED,
COMPLETED,
FAILED,
DISCONNECTED,
CLOSED
}
/** Tracks PeerConnectionInterface::SignalingState */ /** Tracks PeerConnectionInterface::SignalingState */
public enum SignalingState { public enum SignalingState {
STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER, STABLE,
HAVE_REMOTE_PRANSWER, CLOSED HAVE_LOCAL_OFFER,
}; HAVE_LOCAL_PRANSWER,
HAVE_REMOTE_OFFER,
HAVE_REMOTE_PRANSWER,
CLOSED
}
/** Java version of PeerConnectionObserver. */ /** Java version of PeerConnectionObserver. */
public static interface Observer { public static interface Observer {
@ -97,39 +105,25 @@ public class PeerConnection {
} }
/** Java version of PeerConnectionInterface.IceTransportsType */ /** Java version of PeerConnectionInterface.IceTransportsType */
public enum IceTransportsType { public enum IceTransportsType { NONE, RELAY, NOHOST, ALL }
NONE, RELAY, NOHOST, ALL
};
/** Java version of PeerConnectionInterface.BundlePolicy */ /** Java version of PeerConnectionInterface.BundlePolicy */
public enum BundlePolicy { public enum BundlePolicy { BALANCED, MAXBUNDLE, MAXCOMPAT }
BALANCED, MAXBUNDLE, MAXCOMPAT
};
/** Java version of PeerConnectionInterface.RtcpMuxPolicy */ /** Java version of PeerConnectionInterface.RtcpMuxPolicy */
public enum RtcpMuxPolicy { public enum RtcpMuxPolicy { NEGOTIATE, REQUIRE }
NEGOTIATE, REQUIRE
};
/** Java version of PeerConnectionInterface.TcpCandidatePolicy */ /** Java version of PeerConnectionInterface.TcpCandidatePolicy */
public enum TcpCandidatePolicy { public enum TcpCandidatePolicy { ENABLED, DISABLED }
ENABLED, DISABLED
};
/** Java version of PeerConnectionInterface.CandidateNetworkPolicy */ /** Java version of PeerConnectionInterface.CandidateNetworkPolicy */
public enum CandidateNetworkPolicy { public enum CandidateNetworkPolicy { ALL, LOW_COST }
ALL, LOW_COST
};
/** Java version of rtc::KeyType */ /** Java version of rtc::KeyType */
public enum KeyType { public enum KeyType { RSA, ECDSA }
RSA, ECDSA
}
/** Java version of PeerConnectionInterface.ContinualGatheringPolicy */ /** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
public enum ContinualGatheringPolicy { public enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY }
GATHER_ONCE, GATHER_CONTINUALLY
}
/** Java version of PeerConnectionInterface.RTCConfiguration */ /** Java version of PeerConnectionInterface.RTCConfiguration */
public static class RTCConfiguration { public static class RTCConfiguration {
@ -187,26 +181,20 @@ public class PeerConnection {
public native SessionDescription getRemoteDescription(); public native SessionDescription getRemoteDescription();
public native DataChannel createDataChannel( public native DataChannel createDataChannel(String label, DataChannel.Init init);
String label, DataChannel.Init init);
public native void createOffer( public native void createOffer(SdpObserver observer, MediaConstraints constraints);
SdpObserver observer, MediaConstraints constraints);
public native void createAnswer( public native void createAnswer(SdpObserver observer, MediaConstraints constraints);
SdpObserver observer, MediaConstraints constraints);
public native void setLocalDescription( public native void setLocalDescription(SdpObserver observer, SessionDescription sdp);
SdpObserver observer, SessionDescription sdp);
public native void setRemoteDescription( public native void setRemoteDescription(SdpObserver observer, SessionDescription sdp);
SdpObserver observer, SessionDescription sdp);
public native boolean setConfiguration(RTCConfiguration config); public native boolean setConfiguration(RTCConfiguration config);
public boolean addIceCandidate(IceCandidate candidate) { public boolean addIceCandidate(IceCandidate candidate) {
return nativeAddIceCandidate( return nativeAddIceCandidate(candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
} }
public boolean removeIceCandidates(final IceCandidate[] candidates) { public boolean removeIceCandidates(final IceCandidate[] candidates) {
@ -314,8 +302,7 @@ public class PeerConnection {
private native void nativeRemoveLocalStream(long nativeStream); private native void nativeRemoveLocalStream(long nativeStream);
private native boolean nativeGetStats( private native boolean nativeGetStats(StatsObserver observer, long nativeTrack);
StatsObserver observer, long nativeTrack);
private native RtpSender nativeCreateSender(String kind, String stream_id); private native RtpSender nativeCreateSender(String kind, String stream_id);
@ -323,9 +310,7 @@ public class PeerConnection {
private native List<RtpReceiver> nativeGetReceivers(); private native List<RtpReceiver> nativeGetReceivers();
private native boolean nativeStartRtcEventLog( private native boolean nativeStartRtcEventLog(int file_descriptor, int max_size_bytes);
int file_descriptor, int max_size_bytes);
private native void nativeStopRtcEventLog(); private native void nativeStopRtcEventLog();
} }

View File

@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
package org.webrtc; package org.webrtc;
import java.util.List; import java.util.List;
@ -52,9 +51,8 @@ public class PeerConnectionFactory {
// |renderEGLContext| can be provided to support HW video decoding to // |renderEGLContext| can be provided to support HW video decoding to
// texture and will be used to create a shared EGL context on the video // texture and will be used to create a shared EGL context on the video
// decoding thread. // decoding thread.
public static native boolean initializeAndroidGlobals( public static native boolean initializeAndroidGlobals(Object context, boolean initializeAudio,
Object context, boolean initializeAudio, boolean initializeVideo, boolean initializeVideo, boolean videoHwAcceleration);
boolean videoHwAcceleration);
// Field trial initialization. Must be called before PeerConnectionFactory // Field trial initialization. Must be called before PeerConnectionFactory
// is created. // is created.
@ -81,51 +79,44 @@ public class PeerConnectionFactory {
} }
} }
public PeerConnection createPeerConnection( public PeerConnection createPeerConnection(PeerConnection.RTCConfiguration rtcConfig,
PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, PeerConnection.Observer observer) {
MediaConstraints constraints,
PeerConnection.Observer observer) {
long nativeObserver = nativeCreateObserver(observer); long nativeObserver = nativeCreateObserver(observer);
if (nativeObserver == 0) { if (nativeObserver == 0) {
return null; return null;
} }
long nativePeerConnection = nativeCreatePeerConnection( long nativePeerConnection =
nativeFactory, rtcConfig, constraints, nativeObserver); nativeCreatePeerConnection(nativeFactory, rtcConfig, constraints, nativeObserver);
if (nativePeerConnection == 0) { if (nativePeerConnection == 0) {
return null; return null;
} }
return new PeerConnection(nativePeerConnection, nativeObserver); return new PeerConnection(nativePeerConnection, nativeObserver);
} }
public PeerConnection createPeerConnection( public PeerConnection createPeerConnection(List<PeerConnection.IceServer> iceServers,
List<PeerConnection.IceServer> iceServers, MediaConstraints constraints, PeerConnection.Observer observer) {
MediaConstraints constraints, PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
PeerConnection.Observer observer) {
PeerConnection.RTCConfiguration rtcConfig =
new PeerConnection.RTCConfiguration(iceServers);
return createPeerConnection(rtcConfig, constraints, observer); return createPeerConnection(rtcConfig, constraints, observer);
} }
public MediaStream createLocalMediaStream(String label) { public MediaStream createLocalMediaStream(String label) {
return new MediaStream( return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
nativeCreateLocalMediaStream(nativeFactory, label));
} }
public VideoSource createVideoSource(VideoCapturer capturer) { public VideoSource createVideoSource(VideoCapturer capturer) {
final EglBase.Context eglContext = final EglBase.Context eglContext =
localEglbase == null ? null : localEglbase.getEglBaseContext(); localEglbase == null ? null : localEglbase.getEglBaseContext();
long nativeAndroidVideoTrackSource = nativeCreateVideoSource( long nativeAndroidVideoTrackSource =
nativeFactory, eglContext, capturer.isScreencast()); nativeCreateVideoSource(nativeFactory, eglContext, capturer.isScreencast());
VideoCapturer.CapturerObserver capturerObserver VideoCapturer.CapturerObserver capturerObserver =
= new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource); new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
nativeInitializeVideoCapturer(nativeFactory, capturer, nativeAndroidVideoTrackSource, nativeInitializeVideoCapturer(
capturerObserver); nativeFactory, capturer, nativeAndroidVideoTrackSource, capturerObserver);
return new VideoSource(nativeAndroidVideoTrackSource); return new VideoSource(nativeAndroidVideoTrackSource);
} }
public VideoTrack createVideoTrack(String id, VideoSource source) { public VideoTrack createVideoTrack(String id, VideoSource source) {
return new VideoTrack(nativeCreateVideoTrack( return new VideoTrack(nativeCreateVideoTrack(nativeFactory, id, source.nativeSource));
nativeFactory, id, source.nativeSource));
} }
public AudioSource createAudioSource(MediaConstraints constraints) { public AudioSource createAudioSource(MediaConstraints constraints) {
@ -133,8 +124,7 @@ public class PeerConnectionFactory {
} }
public AudioTrack createAudioTrack(String id, AudioSource source) { public AudioTrack createAudioTrack(String id, AudioSource source) {
return new AudioTrack(nativeCreateAudioTrack( return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.nativeSource));
nativeFactory, id, source.nativeSource));
} }
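Taken together, the creation methods in this file chain into the usual local-media setup. A hedged usage sketch; the track and stream ids are illustrative, and capturer construction plus EGL setup are elided:

import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;

final class LocalStreamSetup {
  // Wires capturer -> source -> track -> stream using the methods above.
  static MediaStream createLocalStream(PeerConnectionFactory factory, VideoCapturer capturer) {
    VideoSource videoSource = factory.createVideoSource(capturer);
    VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource);
    AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
    AudioTrack audioTrack = factory.createAudioTrack("audio0", audioSource);
    MediaStream stream = factory.createLocalMediaStream("stream0");
    stream.addTrack(videoTrack);
    stream.addTrack(audioTrack);
    return stream;
  }
}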
// Starts recording an AEC dump. Ownership of the file is transferred to the // Starts recording an AEC dump. Ownership of the file is transferred to the
@ -161,8 +151,8 @@ public class PeerConnectionFactory {
* renderer. * renderer.
* @param remoteEglContext Must be the same as used by any remote video renderer. * @param remoteEglContext Must be the same as used by any remote video renderer.
*/ */
public void setVideoHwAccelerationOptions(EglBase.Context localEglContext, public void setVideoHwAccelerationOptions(
EglBase.Context remoteEglContext) { EglBase.Context localEglContext, EglBase.Context remoteEglContext) {
if (localEglbase != null) { if (localEglbase != null) {
Logging.w(TAG, "Egl context already set."); Logging.w(TAG, "Egl context already set.");
localEglbase.release(); localEglbase.release();
@ -173,8 +163,8 @@ public class PeerConnectionFactory {
} }
localEglbase = EglBase.create(localEglContext); localEglbase = EglBase.create(localEglContext);
remoteEglbase = EglBase.create(remoteEglContext); remoteEglbase = EglBase.create(remoteEglContext);
nativeSetVideoHwAccelerationOptions(nativeFactory, localEglbase.getEglBaseContext(), nativeSetVideoHwAccelerationOptions(
remoteEglbase.getEglBaseContext()); nativeFactory, localEglbase.getEglBaseContext(), remoteEglbase.getEglBaseContext());
} }
public void dispose() { public void dispose() {
@ -227,21 +217,18 @@ public class PeerConnectionFactory {
private static native long nativeCreatePeerConnectionFactory(Options options); private static native long nativeCreatePeerConnectionFactory(Options options);
private static native long nativeCreateObserver( private static native long nativeCreateObserver(PeerConnection.Observer observer);
PeerConnection.Observer observer);
private static native long nativeCreatePeerConnection( private static native long nativeCreatePeerConnection(long nativeFactory,
long nativeFactory, PeerConnection.RTCConfiguration rtcConfig, PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver);
MediaConstraints constraints, long nativeObserver);
private static native long nativeCreateLocalMediaStream( private static native long nativeCreateLocalMediaStream(long nativeFactory, String label);
long nativeFactory, String label);
private static native long nativeCreateVideoSource( private static native long nativeCreateVideoSource(
long nativeFactory, EglBase.Context eglContext, boolean is_screencast); long nativeFactory, EglBase.Context eglContext, boolean is_screencast);
private static native void nativeInitializeVideoCapturer( private static native void nativeInitializeVideoCapturer(long native_factory,
long native_factory, VideoCapturer j_video_capturer, long native_source, VideoCapturer j_video_capturer, long native_source,
VideoCapturer.CapturerObserver j_frame_observer); VideoCapturer.CapturerObserver j_frame_observer);
private static native long nativeCreateVideoTrack( private static native long nativeCreateVideoTrack(
@ -258,8 +245,7 @@ public class PeerConnectionFactory {
private static native void nativeStopAecDump(long nativeFactory); private static native void nativeStopAecDump(long nativeFactory);
@Deprecated @Deprecated public native void nativeSetOptions(long nativeFactory, Options options);
public native void nativeSetOptions(long nativeFactory, Options options);
private static native void nativeSetVideoHwAccelerationOptions( private static native void nativeSetVideoHwAccelerationOptions(
long nativeFactory, Object localEGLContext, Object remoteEGLContext); long nativeFactory, Object localEGLContext, Object remoteEGLContext);

View File

@ -42,8 +42,8 @@ public class RendererCommon {
*/ */
void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight, void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight); int viewportX, int viewportY, int viewportWidth, int viewportHeight);
void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
int viewportX, int viewportY, int viewportWidth, int viewportHeight); int viewportY, int viewportWidth, int viewportHeight);
void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight, void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight); int viewportX, int viewportY, int viewportWidth, int viewportHeight);
@ -116,6 +116,7 @@ public class RendererCommon {
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|. // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
// This limits excessive cropping when adjusting display size. // This limits excessive cropping when adjusting display size.
private static float BALANCED_VISIBLE_FRACTION = 0.5625f; private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
// clang-format off
public static final float[] identityMatrix() { public static final float[] identityMatrix() {
return new float[] { return new float[] {
1, 0, 0, 0, 1, 0, 0, 0,
@ -140,6 +141,7 @@ public class RendererCommon {
0, 0, 1, 0, 0, 0, 1, 0,
1, 0, 0, 1}; 1, 0, 0, 1};
} }
// clang-format on
/** /**
* Returns texture matrix that will have the effect of rotating the frame |rotationDegree| * Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
@ -189,8 +191,8 @@ public class RendererCommon {
/** /**
* Calculate display size based on scaling type, video aspect ratio, and maximum display size. * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
*/ */
public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio, public static Point getDisplaySize(
int maxDisplayWidth, int maxDisplayHeight) { ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio, return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
maxDisplayWidth, maxDisplayHeight); maxDisplayWidth, maxDisplayHeight);
} }
@ -230,17 +232,17 @@ public class RendererCommon {
* Calculate display size based on minimum fraction of the video that must remain visible, * Calculate display size based on minimum fraction of the video that must remain visible,
* video aspect ratio, and maximum display size. * video aspect ratio, and maximum display size.
*/ */
private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio, private static Point getDisplaySize(
int maxDisplayWidth, int maxDisplayHeight) { float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
// If there is no constraint on the amount of cropping, fill the allowed display area. // If there is no constraint on the amount of cropping, fill the allowed display area.
if (minVisibleFraction == 0 || videoAspectRatio == 0) { if (minVisibleFraction == 0 || videoAspectRatio == 0) {
return new Point(maxDisplayWidth, maxDisplayHeight); return new Point(maxDisplayWidth, maxDisplayHeight);
} }
// Each dimension is constrained on max display size and how much we are allowed to crop. // Each dimension is constrained on max display size and how much we are allowed to crop.
final int width = Math.min(maxDisplayWidth, final int width = Math.min(
Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio)); maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
final int height = Math.min(maxDisplayHeight, final int height = Math.min(
Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio)); maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
return new Point(width, height); return new Point(width, height);
} }
} }
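A quick sanity check of the formula above: BALANCED_VISIBLE_FRACTION is 0.5625, i.e. 9/16, which is exactly what lets a 16:9 video fill a square display while the whole frame height stays visible.

// Worked instance of getDisplaySize() with SCALE_ASPECT_BALANCED:
// a 16:9 video in a 1000x1000 view yields the full 1000x1000, because
// cropping to a square shows exactly 9/16 of the frame width.
final float videoAspectRatio = 16f / 9f;
final int width = Math.min(1000, Math.round(1000 / 0.5625f * videoAspectRatio)); // = 1000
final int height = Math.min(1000, Math.round(1000 / 0.5625f / videoAspectRatio)); // = 1000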

View File

@ -48,8 +48,8 @@ public class RtpReceiver {
// Will be released in dispose(). // Will be released in dispose().
private static native long nativeGetTrack(long nativeRtpReceiver); private static native long nativeGetTrack(long nativeRtpReceiver);
private static native boolean nativeSetParameters(long nativeRtpReceiver, private static native boolean nativeSetParameters(
RtpParameters parameters); long nativeRtpReceiver, RtpParameters parameters);
private static native RtpParameters nativeGetParameters(long nativeRtpReceiver); private static native RtpParameters nativeGetParameters(long nativeRtpReceiver);

View File

@@ -30,8 +30,7 @@ public class RtpSender {
   // not appropriate when the track is owned by, for example, another RtpSender
   // or a MediaStream.
   public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
-    if (!nativeSetTrack(nativeRtpSender,
-        (track == null) ? 0 : track.nativeTrack)) {
+    if (!nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.nativeTrack)) {
       return false;
     }
     if (cachedTrack != null && ownsTrack) {
@@ -65,20 +64,17 @@ public class RtpSender {
     free(nativeRtpSender);
   }
-  private static native boolean nativeSetTrack(long nativeRtpSender,
-      long nativeTrack);
+  private static native boolean nativeSetTrack(long nativeRtpSender, long nativeTrack);
   // This should increment the reference count of the track.
   // Will be released in dispose() or setTrack().
   private static native long nativeGetTrack(long nativeRtpSender);
-  private static native boolean nativeSetParameters(long nativeRtpSender,
-      RtpParameters parameters);
+  private static native boolean nativeSetParameters(long nativeRtpSender, RtpParameters parameters);
   private static native RtpParameters nativeGetParameters(long nativeRtpSender);
   private static native String nativeId(long nativeRtpSender);
   private static native void free(long nativeRtpSender);
-}
-;
+};
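The takeOwnership flag reformatted above decides whether the sender frees the track in dispose()/setTrack(). A hedged usage sketch; the sender and track are assumed to come from an existing PeerConnection session:

import org.webrtc.MediaStreamTrack;
import org.webrtc.RtpSender;

class RtpSenderUsage {
  // Pass takeOwnership == false when the track is still owned elsewhere
  // (for example by a MediaStream), per the comment in the hunk above;
  // letting the sender own it too would conflict with the other owner.
  static boolean attachSharedTrack(RtpSender sender, MediaStreamTrack track) {
    return sender.setTrack(track, false /* takeOwnership */);
  }
}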


@@ -35,11 +35,10 @@ import java.util.List;
  * frames. At any time, at most one frame is being processed.
  */
 @TargetApi(21)
-public class ScreenCapturerAndroid implements
-    VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
-  private static final int DISPLAY_FLAGS = DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
-      | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
+public class ScreenCapturerAndroid
+    implements VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
+  private static final int DISPLAY_FLAGS =
+      DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
   // DPI for VirtualDisplay, does not seem to matter for us.
   private static final int VIRTUAL_DISPLAY_DPI = 400;
@@ -65,8 +64,7 @@ public class ScreenCapturerAndroid implements
    * @param mediaProjectionCallback MediaProjection callback to implement application specific
    * logic in events such as when the user revokes a previously granted capture permission.
    **/
-  public ScreenCapturerAndroid(
-      Intent mediaProjectionPermissionResultData,
+  public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData,
       MediaProjection.Callback mediaProjectionCallback) {
     this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
     this.mediaProjectionCallback = mediaProjectionCallback;
@@ -79,10 +77,8 @@ public class ScreenCapturerAndroid implements
   }
   @Override
-  public synchronized void initialize(
-      final SurfaceTextureHelper surfaceTextureHelper,
-      final Context applicationContext,
-      final VideoCapturer.CapturerObserver capturerObserver) {
+  public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
+      final Context applicationContext, final VideoCapturer.CapturerObserver capturerObserver) {
     checkNotDisposed();
     if (capturerObserver == null) {
@@ -95,13 +91,13 @@ public class ScreenCapturerAndroid implements
     }
     this.surfaceTextureHelper = surfaceTextureHelper;
-    mediaProjectionManager = (MediaProjectionManager)
-        applicationContext.getSystemService(Context.MEDIA_PROJECTION_SERVICE);
+    mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
+        Context.MEDIA_PROJECTION_SERVICE);
   }
   @Override
-  public synchronized void startCapture(final int width, final int height,
-      final int ignoredFramerate) {
+  public synchronized void startCapture(
+      final int width, final int height, final int ignoredFramerate) {
     checkNotDisposed();
     this.width = width;
@@ -143,7 +139,6 @@ public class ScreenCapturerAndroid implements
     });
   }
   @Override
   public synchronized void dispose() {
     isDisposed = true;
@@ -184,9 +179,8 @@ public class ScreenCapturerAndroid implements
   private void createVirtualDisplay() {
     surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
-    virtualDisplay = mediaProjection.createVirtualDisplay(
-        "WebRTC_ScreenCapture", width, height, VIRTUAL_DISPLAY_DPI,
-        DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
+    virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
+        VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
         null /* callback */, null /* callback handler */);
   }
@@ -194,8 +188,8 @@ public class ScreenCapturerAndroid implements
   @Override
   public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
     numCapturedFrames++;
-    capturerObserver.onTextureFrameCaptured(width, height, oesTextureId, transformMatrix,
-        0 /* rotation */, timestampNs);
+    capturerObserver.onTextureFrameCaptured(
+        width, height, oesTextureId, transformMatrix, 0 /* rotation */, timestampNs);
   }
   @Override
@@ -207,4 +201,3 @@ public class ScreenCapturerAndroid implements
     return numCapturedFrames;
   }
 }
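For orientation, a sketch of how this constructor is typically fed from an Activity. createScreenCaptureIntent() is the standard MediaProjection entry point; the request code and onActivityResult plumbing are illustrative assumptions:

// Inside an Activity (sketch). REQUEST_SCREEN_CAPTURE is an arbitrary int.
MediaProjectionManager manager =
    (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
startActivityForResult(manager.createScreenCaptureIntent(), REQUEST_SCREEN_CAPTURE);

// Later, in onActivityResult(int requestCode, int resultCode, Intent data),
// the granted permission intent feeds the constructor shown above:
VideoCapturer capturer =
    new ScreenCapturerAndroid(data, new MediaProjection.Callback() {
      @Override
      public void onStop() {
        // The user revoked the capture permission; stop capturing here.
      }
    });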


@@ -8,7 +8,6 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 package org.webrtc;
 /**
@@ -19,7 +18,9 @@ package org.webrtc;
 public class SessionDescription {
   /** Java-land enum version of SessionDescriptionInterface's type() string. */
   public static enum Type {
-    OFFER, PRANSWER, ANSWER;
+    OFFER,
+    PRANSWER,
+    ANSWER;
     public String canonicalForm() {
       return name().toLowerCase();
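canonicalForm() above yields the lowercase SDP type string. A tiny standalone round-trip sketch; the parsing direction is an assumption here (the real class exposes similar parsing, but this enum is purely illustrative):

public class CanonicalFormExample {
  enum Type {
    OFFER,
    PRANSWER,
    ANSWER;

    String canonicalForm() {
      return name().toLowerCase();
    }

    // Assumed inverse for illustration: uppercase back to the constant name.
    static Type fromCanonicalForm(String canonical) {
      return valueOf(canonical.toUpperCase());
    }
  }

  public static void main(String[] args) {
    System.out.println(Type.OFFER.canonicalForm()); // "offer"
    System.out.println(Type.fromCanonicalForm("pranswer")); // PRANSWER
  }
}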


@@ -12,7 +12,6 @@ package org.webrtc;
 /** Java version of webrtc::StatsReport. */
 public class StatsReport {
   /** Java version of webrtc::StatsReport::Value. */
   public static class Value {
     public final String name;
@@ -45,8 +44,13 @@ public class StatsReport {
   public String toString() {
     StringBuilder builder = new StringBuilder();
-    builder.append("id: ").append(id).append(", type: ").append(type)
-        .append(", timestamp: ").append(timestamp).append(", values: ");
+    builder.append("id: ")
+        .append(id)
+        .append(", type: ")
+        .append(type)
+        .append(", timestamp: ")
+        .append(timestamp)
+        .append(", values: ");
     for (int i = 0; i < values.length; ++i) {
       builder.append(values[i].toString()).append(", ");
     }
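To see what the reflowed builder chain produces, a standalone illustration with made-up report fields; real values come from webrtc::StatsReport, so everything below is sample data:

public class StatsReportToStringExample {
  public static void main(String[] args) {
    String id = "ssrc_1234_send"; // sample data, not a real report
    String type = "ssrc";
    double timestamp = 1474321000.0;
    String[] values = {"[googFrameRateSent: 30]", "[bytesSent: 12345]"};

    StringBuilder builder = new StringBuilder();
    builder.append("id: ")
        .append(id)
        .append(", type: ")
        .append(type)
        .append(", timestamp: ")
        .append(timestamp)
        .append(", values: ");
    for (int i = 0; i < values.length; ++i) {
      builder.append(values[i]).append(", ");
    }
    // Prints: id: ssrc_1234_send, type: ssrc, timestamp: 1.474321E9, values:
    // [googFrameRateSent: 30], [bytesSent: 12345],
    System.out.println(builder);
  }
}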


@@ -83,8 +83,7 @@ class SurfaceTextureHelper {
   // Vertex coordinates in Normalized Device Coordinates, i.e.
   // (-1, -1) is bottom-left and (1, 1) is top-right.
-  private static final FloatBuffer DEVICE_RECTANGLE =
-      GlUtil.createFloatBuffer(new float[] {
+  private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
       -1.0f, -1.0f, // Bottom left.
       1.0f, -1.0f, // Bottom right.
       -1.0f, 1.0f, // Top left.
@@ -92,14 +91,14 @@ class SurfaceTextureHelper {
   });
   // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
-  private static final FloatBuffer TEXTURE_RECTANGLE =
-      GlUtil.createFloatBuffer(new float[] {
+  private static final FloatBuffer TEXTURE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
       0.0f, 0.0f, // Bottom left.
       1.0f, 0.0f, // Bottom right.
       0.0f, 1.0f, // Top left.
       1.0f, 1.0f // Top right.
   });
+  // clang-format off
   private static final String VERTEX_SHADER =
       "varying vec2 interp_tc;\n"
       + "attribute vec4 in_pos;\n"
@@ -140,10 +139,11 @@ class SurfaceTextureHelper {
       + "  gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
       + "      texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
       + "}\n";
+  // clang-format on
   private int texMatrixLoc;
   private int xUnitLoc;
-  private int coeffsLoc;;
+  private int coeffsLoc;
   YuvConverter(EglBase.Context sharedContext) {
     eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
@@ -165,11 +165,10 @@ class SurfaceTextureHelper {
     eglBase.detachCurrent();
   }
-  synchronized void convert(ByteBuffer buf,
-      int width, int height, int stride, int textureId, float [] transformMatrix) {
+  synchronized void convert(
+      ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) {
     if (released) {
-      throw new IllegalStateException(
-          "YuvConverter.convert called on released object");
+      throw new IllegalStateException("YuvConverter.convert called on released object");
     }
     // We draw into a buffer laid out like
@@ -202,12 +201,10 @@ class SurfaceTextureHelper {
     // has to be a multiple of 8 pixels.
     if (stride % 8 != 0) {
-      throw new IllegalArgumentException(
-          "Invalid stride, must be a multiple of 8");
+      throw new IllegalArgumentException("Invalid stride, must be a multiple of 8");
     }
     if (stride < width) {
-      throw new IllegalArgumentException(
-          "Invalid stride, must >= width");
+      throw new IllegalArgumentException("Invalid stride, must >= width");
     }
     int y_width = (width + 3) / 4;
@@ -222,13 +219,11 @@ class SurfaceTextureHelper {
     // Produce a frame buffer starting at top-left corner, not
     // bottom-left.
     transformMatrix =
-        RendererCommon.multiplyMatrices(transformMatrix,
-            RendererCommon.verticalFlipMatrix());
+        RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix());
     // Create new pBuffferSurface with the correct size if needed.
     if (eglBase.hasSurface()) {
-      if (eglBase.surfaceWidth() != stride/4 ||
-          eglBase.surfaceHeight() != total_height){
+      if (eglBase.surfaceWidth() != stride / 4 || eglBase.surfaceHeight() != total_height) {
         eglBase.releaseSurface();
         eglBase.createPbufferSurface(stride / 4, total_height);
       }
@@ -245,9 +240,7 @@ class SurfaceTextureHelper {
     // Draw Y
     GLES20.glViewport(0, 0, y_width, height);
     // Matrix * (1;0;0;0) / width. Note that opengl uses column major order.
-    GLES20.glUniform2f(xUnitLoc,
-        transformMatrix[0] / width,
-        transformMatrix[1] / width);
+    GLES20.glUniform2f(xUnitLoc, transformMatrix[0] / width, transformMatrix[1] / width);
     // Y'UV444 to RGB888, see
     // https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
     // We use the ITU-R coefficients for U and V */
@@ -257,9 +250,8 @@ class SurfaceTextureHelper {
     // Draw U
     GLES20.glViewport(0, height, uv_width, uv_height);
     // Matrix * (1;0;0;0) / (width / 2). Note that opengl uses column major order.
-    GLES20.glUniform2f(xUnitLoc,
-        2.0f * transformMatrix[0] / width,
-        2.0f * transformMatrix[1] / width);
+    GLES20.glUniform2f(
+        xUnitLoc, 2.0f * transformMatrix[0] / width, 2.0f * transformMatrix[1] / width);
     GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
     GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
@@ -268,8 +260,8 @@ class SurfaceTextureHelper {
     GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
     GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
-    GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
-        GLES20.GL_UNSIGNED_BYTE, buf);
+    GLES20.glReadPixels(
+        0, 0, stride / 4, total_height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
     GlUtil.checkNoGLES2Error("YuvConverter.convert");
@@ -409,7 +401,8 @@ class SurfaceTextureHelper {
    */
   public void returnTextureFrame() {
     handler.post(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         isTextureInUse = false;
         if (isQuitting) {
           release();
@@ -442,8 +435,8 @@ class SurfaceTextureHelper {
     });
   }
-  public void textureToYUV(ByteBuffer buf,
-      int width, int height, int stride, int textureId, float [] transformMatrix) {
+  public void textureToYUV(
+      ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) {
     if (textureId != oesTextureId)
       throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");


@@ -33,8 +33,8 @@ import javax.microedition.khronos.egl.EGLContext;
 * Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
 * Interaction with the layout framework in onMeasure and onSizeChanged.
 */
-public class SurfaceViewRenderer extends SurfaceView
-    implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
+public class SurfaceViewRenderer
+    extends SurfaceView implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
   private static final String TAG = "SurfaceViewRenderer";
   // Dedicated render thread.
@@ -103,13 +103,15 @@ public class SurfaceViewRenderer extends SurfaceView
   // Runnable for posting frames to render thread.
   private final Runnable renderFrameRunnable = new Runnable() {
-    @Override public void run() {
+    @Override
+    public void run() {
       renderFrameOnRenderThread();
     }
   };
   // Runnable for clearing Surface to black.
   private final Runnable makeBlackRunnable = new Runnable() {
-    @Override public void run() {
+    @Override
+    public void run() {
       makeBlack();
     }
   };
@@ -134,8 +136,7 @@ public class SurfaceViewRenderer extends SurfaceView
   * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
   * reinitialize the renderer after a previous init()/release() cycle.
   */
-  public void init(
-      EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+  public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
     init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
   }
@@ -145,9 +146,9 @@ public class SurfaceViewRenderer extends SurfaceView
   * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
   * init()/release() cycle.
   */
-  public void init(
-      final EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
-      final int[] configAttributes, RendererCommon.GlDrawer drawer) {
+  public void init(final EglBase.Context sharedContext,
+      RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
+      RendererCommon.GlDrawer drawer) {
     synchronized (handlerLock) {
       if (renderThreadHandler != null) {
         throw new IllegalStateException(getResourceName() + "Already initialized");
@@ -210,7 +211,8 @@ public class SurfaceViewRenderer extends SurfaceView
     // when the EGL context is lost. It might be dangerous to delete them manually in
     // Activity.onDestroy().
     renderThreadHandler.postAtFrontOfQueue(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         drawer.release();
         drawer = null;
         if (yuvTextures != null) {
@@ -289,8 +291,7 @@ public class SurfaceViewRenderer extends SurfaceView
     }
     synchronized (handlerLock) {
       if (renderThreadHandler == null) {
-        Logging.d(TAG, getResourceName()
-            + "Dropping frame - Not initialized or already released.");
+        Logging.d(TAG, getResourceName() + "Dropping frame - Not initialized or already released.");
         VideoRenderer.renderFrameDone(frame);
         return;
       }
@@ -335,8 +336,8 @@ public class SurfaceViewRenderer extends SurfaceView
       return;
     }
     desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
-    isNewSize = (desiredLayoutSize.x != getMeasuredWidth()
-        || desiredLayoutSize.y != getMeasuredHeight());
+    isNewSize =
+        (desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight());
     setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
   }
   if (isNewSize) {
@@ -504,11 +505,11 @@ public class SurfaceViewRenderer extends SurfaceView
     }
     yuvUploader.uploadYuvData(
         yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
-    drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
-        0, 0, surfaceSize.x, surfaceSize.y);
+    drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
+        surfaceSize.x, surfaceSize.y);
   } else {
-    drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
-        0, 0, surfaceSize.x, surfaceSize.y);
+    drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
+        surfaceSize.x, surfaceSize.y);
   }
   eglBase.swapBuffers();
@@ -547,8 +548,8 @@ public class SurfaceViewRenderer extends SurfaceView
   synchronized (layoutLock) {
     if (frameWidth != frame.width || frameHeight != frame.height
         || frameRotation != frame.rotationDegree) {
-      Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
-          + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
+      Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to " + frame.width
+          + "x" + frame.height + " with rotation " + frame.rotationDegree);
       if (rendererEvents != null) {
         rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
       }
@@ -556,7 +557,8 @@ public class SurfaceViewRenderer extends SurfaceView
       frameHeight = frame.height;
       frameRotation = frame.rotationDegree;
       post(new Runnable() {
-        @Override public void run() {
+        @Override
+        public void run() {
           requestLayout();
         }
       });
@@ -566,12 +568,12 @@ public class SurfaceViewRenderer extends SurfaceView
   private void logStatistics() {
     synchronized (statisticsLock) {
-      Logging.d(TAG, getResourceName() + "Frames received: "
-          + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+      Logging.d(TAG, getResourceName() + "Frames received: " + framesReceived + ". Dropped: "
+          + framesDropped + ". Rendered: " + framesRendered);
       if (framesReceived > 0 && framesRendered > 0) {
         final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
-        Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
-            " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
+        Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6)
+            + " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
         Logging.d(TAG, getResourceName() + "Average render time: "
             + (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
       }
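As the init() javadoc above notes, init()/release() cycles are legal. A hedged lifecycle sketch; rootEglBase, videoTrack, and the view id are stand-ins created elsewhere, not names from this diff:

// Inside an Activity (sketch):
SurfaceViewRenderer renderer = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
renderer.init(rootEglBase.getEglBaseContext(), null /* rendererEvents */);
videoTrack.addRenderer(new VideoRenderer(renderer));
// ...later, e.g. across a configuration change, reinitialization is allowed:
renderer.release();
renderer.init(rootEglBase.getEglBaseContext(), null /* rendererEvents */);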


@@ -24,14 +24,13 @@ public interface VideoCapturer {
     void onCapturerStopped();
     // Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
-    void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
-        long timeStamp);
+    void onByteBufferFrameCaptured(
+        byte[] data, int width, int height, int rotation, long timeStamp);
     // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
     // owned by VideoCapturer.
-    void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
-        long timestamp);
+    void onTextureFrameCaptured(int width, int height, int oesTextureId, float[] transformMatrix,
+        int rotation, long timestamp);
   }
   // An implementation of CapturerObserver that forwards all calls from
@@ -55,25 +54,23 @@ public interface VideoCapturer {
     }
     @Override
-    public void onByteBufferFrameCaptured(byte[] data, int width, int height,
-        int rotation, long timeStamp) {
-      nativeOnByteBufferFrameCaptured(nativeSource, data, data.length, width, height, rotation,
-          timeStamp);
+    public void onByteBufferFrameCaptured(
+        byte[] data, int width, int height, int rotation, long timeStamp) {
+      nativeOnByteBufferFrameCaptured(
+          nativeSource, data, data.length, width, height, rotation, timeStamp);
     }
     @Override
-    public void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
-        long timestamp) {
-      nativeOnTextureFrameCaptured(nativeSource, width, height, oesTextureId, transformMatrix,
-          rotation, timestamp);
+    public void onTextureFrameCaptured(int width, int height, int oesTextureId,
+        float[] transformMatrix, int rotation, long timestamp) {
+      nativeOnTextureFrameCaptured(
+          nativeSource, width, height, oesTextureId, transformMatrix, rotation, timestamp);
     }
-    private native void nativeCapturerStarted(long nativeSource,
-        boolean success);
+    private native void nativeCapturerStarted(long nativeSource, boolean success);
     private native void nativeCapturerStopped(long nativeSource);
-    private native void nativeOnByteBufferFrameCaptured(long nativeSource,
-        byte[] data, int length, int width, int height, int rotation, long timeStamp);
+    private native void nativeOnByteBufferFrameCaptured(long nativeSource, byte[] data, int length,
+        int width, int height, int rotation, long timeStamp);
     private native void nativeOnTextureFrameCaptured(long nativeSource, int width, int height,
         int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
   }
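A non-native CapturerObserver is useful for tests and diagnostics, in the spirit of CameraVideoCapturerTestFixtures further down. A sketch, assuming the interface carries exactly the callbacks shown in this hunk:

import org.webrtc.VideoCapturer;

class FrameCountingObserver implements VideoCapturer.CapturerObserver {
  private final Object frameLock = new Object();
  private int framesCaptured;

  @Override
  public void onCapturerStarted(boolean success) {}

  @Override
  public void onCapturerStopped() {}

  @Override
  public void onByteBufferFrameCaptured(
      byte[] data, int width, int height, int rotation, long timeStamp) {
    synchronized (frameLock) {
      ++framesCaptured; // Count byte-buffer frames.
    }
  }

  @Override
  public void onTextureFrameCaptured(int width, int height, int oesTextureId,
      float[] transformMatrix, int rotation, long timestamp) {
    synchronized (frameLock) {
      ++framesCaptured; // Count texture frames the same way.
    }
  }

  int framesCaptured() {
    synchronized (frameLock) {
      return framesCaptured;
    }
  }
}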


@@ -40,9 +40,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 // the camera has been stopped.
 // TODO(magjed): This class name is now confusing - rename to Camera1VideoCapturer.
 @SuppressWarnings("deprecation")
-public class VideoCapturerAndroid implements
-    CameraVideoCapturer,
-    android.hardware.Camera.PreviewCallback,
+public class VideoCapturerAndroid
+    implements CameraVideoCapturer, android.hardware.Camera.PreviewCallback,
     SurfaceTextureHelper.OnTextureFrameAvailableListener {
   private static final String TAG = "VideoCapturerAndroid";
   private static final int CAMERA_STOP_TIMEOUT_MS = 7000;
@@ -98,15 +97,14 @@ public class VideoCapturerAndroid implements
     }
   };
-  public static VideoCapturerAndroid create(String name,
-      CameraEventsHandler eventsHandler) {
+  public static VideoCapturerAndroid create(String name, CameraEventsHandler eventsHandler) {
     return VideoCapturerAndroid.create(name, eventsHandler, false /* captureToTexture */);
   }
   // Use ctor directly instead.
   @Deprecated
-  public static VideoCapturerAndroid create(String name,
-      CameraEventsHandler eventsHandler, boolean captureToTexture) {
+  public static VideoCapturerAndroid create(
+      String name, CameraEventsHandler eventsHandler, boolean captureToTexture) {
     try {
       return new VideoCapturerAndroid(name, eventsHandler, captureToTexture);
     } catch (RuntimeException e) {
@@ -176,7 +174,8 @@ public class VideoCapturerAndroid implements
   @Override
   public void changeCaptureFormat(final int width, final int height, final int framerate) {
     maybePostOnCameraThread(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         startPreviewOnCameraThread(width, height, framerate);
       }
     });
@@ -195,8 +194,8 @@ public class VideoCapturerAndroid implements
     return isCapturingToTexture;
   }
-  public VideoCapturerAndroid(String cameraName, CameraEventsHandler eventsHandler,
-      boolean captureToTexture) {
+  public VideoCapturerAndroid(
+      String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
     if (android.hardware.Camera.getNumberOfCameras() == 0) {
       throw new RuntimeException("No cameras available");
     }
@@ -332,8 +331,8 @@ public class VideoCapturerAndroid implements
     camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
-    Logging.d(TAG, "Camera orientation: " + info.orientation +
-        " .Device orientation: " + getDeviceOrientation());
+    Logging.d(TAG, "Camera orientation: " + info.orientation + " .Device orientation: "
+        + getDeviceOrientation());
     camera.setErrorCallback(cameraErrorCallback);
     startPreviewOnCameraThread(width, height, framerate);
     frameObserver.onCapturerStarted(true);
@@ -392,8 +391,7 @@ public class VideoCapturerAndroid implements
     }
     // Update camera parameters.
-    Logging.d(TAG, "isVideoStabilizationSupported: " +
-        parameters.isVideoStabilizationSupported());
+    Logging.d(TAG, "isVideoStabilizationSupported: " + parameters.isVideoStabilizationSupported());
     if (parameters.isVideoStabilizationSupported()) {
       parameters.setVideoStabilization(true);
     }
@@ -453,7 +451,8 @@ public class VideoCapturerAndroid implements
     Logging.d(TAG, "stopCapture");
     final CountDownLatch barrier = new CountDownLatch(1);
     final boolean didPost = maybePostOnCameraThread(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         stopCaptureOnCameraThread(true /* stopHandler */);
         barrier.countDown();
       }
@@ -535,8 +534,7 @@ public class VideoCapturerAndroid implements
   private int getDeviceOrientation() {
     int orientation = 0;
-    WindowManager wm = (WindowManager) applicationContext.getSystemService(
-        Context.WINDOW_SERVICE);
+    WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
     switch (wm.getDefaultDisplay().getRotation()) {
       case Surface.ROTATION_90:
         orientation = 90;
@@ -579,8 +577,7 @@ public class VideoCapturerAndroid implements
       throw new RuntimeException("Unexpected camera in callback!");
     }
-    final long captureTimeNs =
-        TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+    final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
     if (eventsHandler != null && !firstFrameReported) {
       eventsHandler.onFirstFrameAvailable();
@@ -588,14 +585,13 @@ public class VideoCapturerAndroid implements
     }
     cameraStatistics.addFrame();
-    frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
-        getFrameOrientation(), captureTimeNs);
+    frameObserver.onByteBufferFrameCaptured(
+        data, captureFormat.width, captureFormat.height, getFrameOrientation(), captureTimeNs);
     camera.addCallbackBuffer(data);
   }
   @Override
-  public void onTextureFrameAvailable(
-      int oesTextureId, float[] transformMatrix, long timestampNs) {
+  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
     checkIsOnCameraThread();
     if (!isCameraRunning.get()) {
       Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");


@@ -61,11 +61,13 @@ public class VideoRenderer {
       // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
       // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
       // matrix.
+      // clang-format off
       samplingMatrix = new float[] {
           1, 0, 0, 0,
           0, -1, 0, 0,
           0, 0, 1, 0,
           0, 1, 0, 1};
+      // clang-format on
     }
     /**
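The comment above says this matrix corrects the bottom-left/top-left origin mismatch. A quick sanity-check sketch multiplying the column-major matrix by a texture coordinate; Android-only (android.opengl.Matrix), and the class name is illustrative:

import android.opengl.Matrix;

public class SamplingMatrixExample {
  public static void main(String[] args) {
    // Same values as the samplingMatrix above, column-major as OpenGL expects.
    float[] samplingMatrix = {
        1, 0, 0, 0,
        0, -1, 0, 0,
        0, 0, 1, 0,
        0, 1, 0, 1};
    float[] in = {0.25f, 0.0f, 0f, 1f}; // (u, v) = (0.25, 0.0), bottom row.
    float[] out = new float[4];
    Matrix.multiplyMV(out, 0, samplingMatrix, 0, in, 0);
    // out = (0.25, 1.0, 0, 1): v maps to 1 - v, i.e. a vertical flip.
    System.out.println(out[0] + ", " + out[1]);
  }
}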
@@ -97,14 +99,13 @@ public class VideoRenderer {
     @Override
     public String toString() {
-      return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
-          ":" + yuvStrides[2];
+      return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] + ":" + yuvStrides[2];
     }
   }
   // Helper native function to do a video frame plane copying.
-  public static native void nativeCopyPlane(ByteBuffer src, int width,
-      int height, int srcStride, ByteBuffer dst, int dstStride);
+  public static native void nativeCopyPlane(
+      ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);
   /** The real meat of VideoSinkInterface. */
   public static interface Callbacks {
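For readers unfamiliar with strided plane copies, a plain-Java sketch of what a helper like nativeCopyPlane() does conceptually (the real work happens in native code; this is only an equivalent loop): copy width bytes per row, for height rows, honoring each side's stride.

import java.nio.ByteBuffer;

public class CopyPlaneExample {
  static void copyPlane(
      ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride) {
    byte[] row = new byte[width];
    for (int y = 0; y < height; ++y) {
      src.position(y * srcStride); // Row start in the source plane.
      src.get(row, 0, width);      // Only the visible width, not the padding.
      dst.position(y * dstStride); // Row start in the destination plane.
      dst.put(row, 0, width);
    }
  }

  public static void main(String[] args) {
    ByteBuffer src = ByteBuffer.allocateDirect(8 * 4); // 4 rows, stride 8.
    ByteBuffer dst = ByteBuffer.allocateDirect(4 * 4); // 4 rows, stride 4.
    copyPlane(src, 4, 4, 8, dst, 4); // Drops 4 padding bytes per source row.
    System.out.println("copied 4x4 plane into " + dst.capacity() + " bytes");
  }
}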


@@ -90,7 +90,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     private I420Frame pendingFrame;
     private final Object pendingFrameLock = new Object();
     // Type of video frame used for recent frame rendering.
-    private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
+    private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE }
     private RendererType rendererType;
     private RendererCommon.ScalingType scalingType;
     private boolean mirror;
@@ -136,9 +137,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     // it rendered up right.
     private int rotationDegree;
-    private YuvImageRenderer(
-        GLSurfaceView surface, int id,
-        int x, int y, int width, int height,
+    private YuvImageRenderer(GLSurfaceView surface, int id, int x, int y, int width, int height,
         RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
       Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
       this.surface = surface;
@@ -167,8 +166,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     }
     private void createTextures() {
-      Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
-          Thread.currentThread().getId());
+      Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:"
+          + Thread.currentThread().getId());
       // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
       for (int i = 0; i < 3; i++) {
@@ -185,8 +184,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       }
       // Initialize to maximum allowed area. Round to integer coordinates inwards the layout
       // bounding box (ceil left/top and floor right/bottom) to not break constraints.
-      displayLayout.set(
-          (screenWidth * layoutInPercentage.left + 99) / 100,
+      displayLayout.set((screenWidth * layoutInPercentage.left + 99) / 100,
           (screenHeight * layoutInPercentage.top + 99) / 100,
           (screenWidth * layoutInPercentage.right) / 100,
           (screenHeight * layoutInPercentage.bottom) / 100);
@@ -197,12 +195,12 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
           ? (float) videoWidth / videoHeight
           : (float) videoHeight / videoWidth;
       // Adjust display size based on |scalingType|.
-      final Point displaySize = RendererCommon.getDisplaySize(scalingType,
-          videoAspectRatio, displayLayout.width(), displayLayout.height());
+      final Point displaySize = RendererCommon.getDisplaySize(
+          scalingType, videoAspectRatio, displayLayout.width(), displayLayout.height());
       displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
           (displayLayout.height() - displaySize.y) / 2);
-      Logging.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
-          + displayLayout.height());
+      Logging.d(TAG,
          " Adjusted display size: " + displayLayout.width() + " x " + displayLayout.height());
       layoutMatrix = RendererCommon.getLayoutMatrix(
           mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
       updateLayoutProperties = false;
@@ -242,9 +240,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       GlUtil.checkNoGLES2Error("glBindFramebuffer");
       // Copy the OES texture content. This will also normalize the sampling matrix.
-      drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
-          textureCopy.getWidth(), textureCopy.getHeight(),
-          0, 0, textureCopy.getWidth(), textureCopy.getHeight());
+      drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix, textureCopy.getWidth(),
+          textureCopy.getHeight(), 0, 0, textureCopy.getWidth(), textureCopy.getHeight());
       rotatedSamplingMatrix = RendererCommon.identityMatrix();
       // Restore normal framebuffer.
@@ -263,8 +260,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       // OpenGL defaults to lower left origin - flip viewport position vertically.
       final int viewportY = screenHeight - displayLayout.bottom;
       if (rendererType == RendererType.RENDERER_YUV) {
-        drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight,
-            displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
+        drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight, displayLayout.left,
+            viewportY, displayLayout.width(), displayLayout.height());
       } else {
         drawer.drawRgb(textureCopy.getTextureId(), texMatrix, videoWidth, videoHeight,
             displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
@@ -281,15 +278,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     private void logStatistics() {
       long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
-      Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
-          ". Frames received: " + framesReceived +
-          ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+      Logging.d(TAG, "ID: " + id + ". Type: " + rendererType + ". Frames received: "
+          + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
       if (framesReceived > 0 && framesRendered > 0) {
-        Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
-            " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
-        Logging.d(TAG, "Draw time: " +
-            (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
-            (int) (copyTimeNs / (1000 * framesReceived)) + " us");
+        Logging.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) + " ms. FPS: "
+            + framesRendered * 1e9 / timeSinceFirstFrameNs);
+        Logging.d(TAG, "Draw time: " + (int) (drawTimeNs / (1000 * framesRendered))
+            + " us. Copy time: " + (int) (copyTimeNs / (1000 * framesReceived)) + " us");
       }
     }
@@ -298,8 +293,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) {
         return;
       }
-      Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " +
-          screenWidth + " x " + screenHeight);
+      Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " + screenWidth + " x "
+          + screenHeight);
       this.screenWidth = screenWidth;
       this.screenHeight = screenHeight;
       updateLayoutProperties = true;
@@ -315,9 +310,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
           && mirror == this.mirror) {
         return;
       }
-      Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
-          ") " + width + " x " + height + ". Scaling: " + scalingType +
-          ". Mirror: " + mirror);
+      Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y + ") "
+          + width + " x " + height + ". Scaling: " + scalingType + ". Mirror: " + mirror);
       this.layoutInPercentage.set(layoutInPercentage);
       this.scalingType = scalingType;
       this.mirror = mirror;
@@ -331,14 +325,14 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
         return;
       }
       if (rendererEvents != null) {
-        Logging.d(TAG, "ID: " + id +
-            ". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight);
+        Logging.d(TAG, "ID: " + id + ". Reporting frame resolution changed to " + videoWidth + " x "
+            + videoHeight);
         rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
       }
       synchronized (updateLayoutLock) {
-        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
-            videoWidth + " x " + videoHeight + " rotation " + rotation);
+        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " + videoWidth + " x "
+            + videoHeight + " rotation " + rotation);
         this.videoWidth = videoWidth;
         this.videoHeight = videoHeight;
@@ -366,11 +360,10 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       synchronized (pendingFrameLock) {
         // Check input frame parameters.
         if (frame.yuvFrame) {
-          if (frame.yuvStrides[0] < frame.width ||
-              frame.yuvStrides[1] < frame.width / 2 ||
-              frame.yuvStrides[2] < frame.width / 2) {
-            Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
-                frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
+          if (frame.yuvStrides[0] < frame.width || frame.yuvStrides[1] < frame.width / 2
+              || frame.yuvStrides[2] < frame.width / 2) {
+            Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " + frame.yuvStrides[1]
+                + ", " + frame.yuvStrides[2]);
             VideoRenderer.renderFrameDone(frame);
             return;
           }
@@ -394,8 +387,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   }
   /** Passes GLSurfaceView to video renderer. */
-  public static synchronized void setView(GLSurfaceView surface,
-      Runnable eglContextReadyCallback) {
+  public static synchronized void setView(GLSurfaceView surface, Runnable eglContextReadyCallback) {
     Logging.d(TAG, "VideoRendererGui.setView");
     instance = new VideoRendererGui(surface);
     eglContextReady = eglContextReadyCallback;
@@ -431,14 +423,12 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   */
  public static VideoRenderer createGui(int x, int y, int width, int height,
      RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
-    YuvImageRenderer javaGuiRenderer = create(
-        x, y, width, height, scalingType, mirror);
+    YuvImageRenderer javaGuiRenderer = create(x, y, width, height, scalingType, mirror);
     return new VideoRenderer(javaGuiRenderer);
   }
   public static VideoRenderer.Callbacks createGuiRenderer(
-      int x, int y, int width, int height,
-      RendererCommon.ScalingType scalingType, boolean mirror) {
+      int x, int y, int width, int height, RendererCommon.ScalingType scalingType, boolean mirror) {
     return create(x, y, width, height, scalingType, mirror);
   }
@@ -447,8 +437,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   * resolution (width, height). All parameters are in percentage of
   * screen resolution.
   */
-  public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
-      RendererCommon.ScalingType scalingType, boolean mirror) {
+  public static synchronized YuvImageRenderer create(
+      int x, int y, int width, int height, RendererCommon.ScalingType scalingType, boolean mirror) {
     return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
   }
@@ -460,19 +450,16 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
  public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
      RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
     // Check display region parameters.
-    if (x < 0 || x > 100 || y < 0 || y > 100 ||
-        width < 0 || width > 100 || height < 0 || height > 100 ||
-        x + width > 100 || y + height > 100) {
+    if (x < 0 || x > 100 || y < 0 || y > 100 || width < 0 || width > 100 || height < 0
+        || height > 100 || x + width > 100 || y + height > 100) {
       throw new RuntimeException("Incorrect window parameters.");
     }
     if (instance == null) {
-      throw new RuntimeException(
-          "Attempt to create yuv renderer before setting GLSurfaceView");
+      throw new RuntimeException("Attempt to create yuv renderer before setting GLSurfaceView");
     }
-    final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
-        instance.surface, instance.yuvImageRenderers.size(),
-        x, y, width, height, scalingType, mirror, drawer);
+    final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(instance.surface,
+        instance.yuvImageRenderers.size(), x, y, width, height, scalingType, mirror, drawer);
     synchronized (instance.yuvImageRenderers) {
       if (instance.onSurfaceCreatedCalled) {
         // onSurfaceCreated has already been called for VideoRendererGui -
@@ -483,8 +470,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
           @Override
           public void run() {
             yuvImageRenderer.createTextures();
-            yuvImageRenderer.setScreenSize(
-                instance.screenWidth, instance.screenHeight);
+            yuvImageRenderer.setScreenSize(instance.screenWidth, instance.screenHeight);
             countDownLatch.countDown();
           }
         });
@@ -501,13 +487,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     return yuvImageRenderer;
   }
-  public static synchronized void update(
-      VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
-      RendererCommon.ScalingType scalingType, boolean mirror) {
+  public static synchronized void update(VideoRenderer.Callbacks renderer, int x, int y, int width,
+      int height, RendererCommon.ScalingType scalingType, boolean mirror) {
     Logging.d(TAG, "VideoRendererGui.update");
     if (instance == null) {
-      throw new RuntimeException(
-          "Attempt to update yuv renderer before setting GLSurfaceView");
+      throw new RuntimeException("Attempt to update yuv renderer before setting GLSurfaceView");
     }
     synchronized (instance.yuvImageRenderers) {
       for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@@ -522,8 +506,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
      VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
     Logging.d(TAG, "VideoRendererGui.setRendererEvents");
     if (instance == null) {
-      throw new RuntimeException(
-          "Attempt to set renderer events before setting GLSurfaceView");
+      throw new RuntimeException("Attempt to set renderer events before setting GLSurfaceView");
     }
     synchronized (instance.yuvImageRenderers) {
       for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@@ -537,8 +520,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
  public static synchronized void remove(VideoRenderer.Callbacks renderer) {
     Logging.d(TAG, "VideoRendererGui.remove");
     if (instance == null) {
-      throw new RuntimeException(
-          "Attempt to remove renderer before setting GLSurfaceView");
+      throw new RuntimeException("Attempt to remove renderer before setting GLSurfaceView");
     }
     synchronized (instance.yuvImageRenderers) {
       final int index = instance.yuvImageRenderers.indexOf(renderer);
@@ -553,8 +535,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
  public static synchronized void reset(VideoRenderer.Callbacks renderer) {
     Logging.d(TAG, "VideoRendererGui.reset");
     if (instance == null) {
-      throw new RuntimeException(
-          "Attempt to reset renderer before setting GLSurfaceView");
+      throw new RuntimeException("Attempt to reset renderer before setting GLSurfaceView");
     }
     synchronized (instance.yuvImageRenderers) {
       for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@@ -621,8 +602,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   @Override
   public void onSurfaceChanged(GL10 unused, int width, int height) {
-    Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
-        width + " x " + height + " ");
+    Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " + width + " x " + height + " ");
     screenWidth = width;
     screenHeight = height;
     synchronized (yuvImageRenderers) {
@@ -645,5 +625,4 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
         }
       }
     }
   }
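Putting the static API above together, a hedged usage sketch; coordinates are percentages of the GLSurfaceView's resolution, and glSurfaceView plus remoteVideoTrack are assumed to exist elsewhere:

VideoRendererGui.setView(glSurfaceView, null /* eglContextReadyCallback */);
// Full-screen renderer: top-left (0, 0), 100% x 100% of the view.
VideoRenderer.Callbacks remoteCallbacks = VideoRendererGui.createGuiRenderer(
    0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FIT, false /* mirror */);
remoteVideoTrack.addRenderer(new VideoRenderer(remoteCallbacks));
// Shrink it into the bottom-right corner later:
VideoRendererGui.update(remoteCallbacks, 70, 70, 28, 28,
    RendererCommon.ScalingType.SCALE_ASPECT_FIT, false /* mirror */);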


@@ -8,7 +8,6 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 package org.webrtc;
 /**


@@ -14,8 +14,7 @@ import java.util.LinkedList;
 /** Java version of VideoTrackInterface. */
 public class VideoTrack extends MediaStreamTrack {
-  private final LinkedList<VideoRenderer> renderers =
-      new LinkedList<VideoRenderer>();
+  private final LinkedList<VideoRenderer> renderers = new LinkedList<VideoRenderer>();
   public VideoTrack(long nativeTrack) {
     super(nativeTrack);
@@ -43,9 +42,7 @@ public class VideoTrack extends MediaStreamTrack {
   private static native void free(long nativeTrack);
-  private static native void nativeAddRenderer(
-      long nativeTrack, long nativeRenderer);
+  private static native void nativeAddRenderer(long nativeTrack, long nativeRenderer);
-  private static native void nativeRemoveRenderer(
-      long nativeTrack, long nativeRenderer);
+  private static native void nativeRemoveRenderer(long nativeTrack, long nativeRenderer);
 }


@@ -19,8 +19,7 @@ import android.test.suitebuilder.annotation.SmallTest;
 public class Camera1CapturerUsingByteBufferTest extends InstrumentationTestCase {
   static final String TAG = "Camera1CapturerUsingByteBufferTest";
-  private class TestObjectFactory
-      extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+  private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
     @Override
     public boolean isCapturingToTexture() {
       return false;


@@ -19,8 +19,7 @@ import android.test.suitebuilder.annotation.SmallTest;
 public class Camera1CapturerUsingTextureTest extends InstrumentationTestCase {
   static final String TAG = "Camera1CapturerUsingTextureTest";
-  private class TestObjectFactory
-      extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+  private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
     @Override
     public CameraEnumerator getCameraEnumerator() {
       return new Camera1Enumerator();


@@ -143,8 +143,7 @@ public class Camera2CapturerTest extends InstrumentationTestCase {
     }
   }
-  private class TestObjectFactory
-      extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+  private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
     @Override
     public CameraEnumerator getCameraEnumerator() {
       return new Camera2Enumerator(getAppContext());


@@ -114,8 +114,8 @@ class CameraVideoCapturerTestFixtures {
     }

     @Override
-    public void onByteBufferFrameCaptured(byte[] frame, int width, int height, int rotation,
-        long timeStamp) {
+    public void onByteBufferFrameCaptured(
+        byte[] frame, int width, int height, int rotation, long timeStamp) {
       synchronized (frameLock) {
         ++framesCaptured;
         frameSize = frame.length;
@@ -126,9 +126,8 @@ class CameraVideoCapturerTestFixtures {
       }
     }
     @Override
-    public void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
-        long timeStamp) {
+    public void onTextureFrameCaptured(int width, int height, int oesTextureId,
+        float[] transformMatrix, int rotation, long timeStamp) {
       synchronized (frameLock) {
         ++framesCaptured;
         frameWidth = width;
@@ -182,8 +181,7 @@ class CameraVideoCapturerTestFixtures {
     }
   }

-  static class CameraEvents implements
-      CameraVideoCapturer.CameraEventsHandler {
+  static class CameraEvents implements CameraVideoCapturer.CameraEventsHandler {
    public boolean onCameraOpeningCalled;
    public boolean onFirstFrameAvailableCalled;
    public final Object onCameraFreezedLock = new Object();
@@ -275,8 +273,7 @@ class CameraVideoCapturerTestFixtures {
    }

    public CameraVideoCapturer createCapturer(
-       String name,
-       CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+       String name, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
      return cameraEnumerator.createCapturer(name, eventsHandler);
    }
@@ -374,8 +371,8 @@ class CameraVideoCapturerTestFixtures {
     instance.surfaceTextureHelper.dispose();
   }

-  private VideoTrackWithRenderer createVideoTrackWithRenderer(CameraVideoCapturer capturer,
-      VideoRenderer.Callbacks rendererCallbacks) {
+  private VideoTrackWithRenderer createVideoTrackWithRenderer(
+      CameraVideoCapturer capturer, VideoRenderer.Callbacks rendererCallbacks) {
    VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
    videoTrackWithRenderer.source = peerConnectionFactory.createVideoSource(capturer);
    capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
@@ -410,7 +407,8 @@ class CameraVideoCapturerTestFixtures {
   private void waitUntilIdle(CapturerInstance capturerInstance) throws InterruptedException {
     final CountDownLatch barrier = new CountDownLatch(1);
     capturerInstance.surfaceTextureHelper.getHandler().post(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         barrier.countDown();
       }
     });
@@ -461,8 +459,8 @@ class CameraVideoCapturerTestFixtures {
   public void switchCamera() throws InterruptedException {
     if (!testObjectFactory.haveTwoCameras()) {
-      Logging.w(TAG,
-          "Skipping test switch video capturer because the device doesn't have two cameras.");
+      Logging.w(
+          TAG, "Skipping test switch video capturer because the device doesn't have two cameras.");
       return;
     }
@@ -553,17 +551,17 @@ class CameraVideoCapturerTestFixtures {
     capturerInstance.observer.waitForNextCapturedFrame();

     // Check the frame size. The actual width and height depend on how the capturer is mounted.
-    final boolean identicalResolution = (
-        capturerInstance.observer.frameWidth() == capturerInstance.format.width
-        && capturerInstance.observer.frameHeight() == capturerInstance.format.height);
-    final boolean flippedResolution = (
-        capturerInstance.observer.frameWidth() == capturerInstance.format.height
-        && capturerInstance.observer.frameHeight() == capturerInstance.format.width);
+    final boolean identicalResolution =
+        (capturerInstance.observer.frameWidth() == capturerInstance.format.width
+            && capturerInstance.observer.frameHeight() == capturerInstance.format.height);
+    final boolean flippedResolution =
+        (capturerInstance.observer.frameWidth() == capturerInstance.format.height
+            && capturerInstance.observer.frameHeight() == capturerInstance.format.width);
     if (!identicalResolution && !flippedResolution) {
-      fail("Wrong resolution, got: "
-          + capturerInstance.observer.frameWidth() + "x" + capturerInstance.observer.frameHeight()
-          + " expected: "+ capturerInstance.format.width + "x" + capturerInstance.format.height
-          + " or " + capturerInstance.format.height + "x" + capturerInstance.format.width);
+      fail("Wrong resolution, got: " + capturerInstance.observer.frameWidth() + "x"
+          + capturerInstance.observer.frameHeight() + " expected: "
+          + capturerInstance.format.width + "x" + capturerInstance.format.height + " or "
+          + capturerInstance.format.height + "x" + capturerInstance.format.width);
     }

     if (testObjectFactory.isCapturingToTexture()) {
@@ -600,8 +598,7 @@ class CameraVideoCapturerTestFixtures {
     disposeCapturer(capturerInstance);
   }

-  public void returnBufferLateEndToEnd()
-      throws InterruptedException {
+  public void returnBufferLateEndToEnd() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);
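Note on the resolution check rewrapped above: a captured frame is accepted if it matches the requested format either directly or with width and height swapped, because the reported size depends on how the camera sensor is mounted. A hedged, self-contained restatement of that logic (names are illustrative, not from the source):

final class ResolutionCheckSketch {
  // Returns true if the captured frame size matches the requested format,
  // allowing for a 90/270-degree sensor mounting that swaps the dimensions.
  static boolean matches(int frameWidth, int frameHeight, int formatWidth, int formatHeight) {
    final boolean identical = frameWidth == formatWidth && frameHeight == formatHeight;
    final boolean flipped = frameWidth == formatHeight && frameHeight == formatWidth;
    return identical || flipped;
  }
}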


@@ -53,7 +53,8 @@ public final class GlRectDrawerTest extends ActivityTestCase {
         fail("ByteBuffers of size " + width + "x" + height + " not equal at position "
             + "(" + x + ", " + y + "). Expected color (R,G,B): "
             + "(" + expectedR + ", " + expectedG + ", " + expectedB + ")"
-            + " but was: " + "(" + actualR + ", " + actualG + ", " + actualB + ").");
+            + " but was: "
+            + "(" + actualR + ", " + actualG + ", " + actualB + ").");
       }
     }
   }
@@ -92,14 +93,14 @@ public final class GlRectDrawerTest extends ActivityTestCase {
     final int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
     GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
     GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
-    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
-        HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
+        GLES20.GL_UNSIGNED_BYTE, rgbPlane);
     GlUtil.checkNoGLES2Error("glTexImage2D");

     // Draw the RGB frame onto the pixel buffer.
     final GlRectDrawer drawer = new GlRectDrawer();
-    drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
-        0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
+    drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
+        0 /* viewportY */, WIDTH, HEIGHT);

     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
     final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -139,15 +140,15 @@ public final class GlRectDrawerTest extends ActivityTestCase {
     for (int i = 0; i < 3; ++i) {
       GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
       GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
-      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH,
-          HEIGHT, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
+      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH, HEIGHT, 0,
+          GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
       GlUtil.checkNoGLES2Error("glTexImage2D");
     }

     // Draw the YUV frame onto the pixel buffer.
     final GlRectDrawer drawer = new GlRectDrawer();
-    drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
-        0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
+    drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
+        0 /* viewportY */, WIDTH, HEIGHT);

     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
     final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -212,8 +213,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
     private final int rgbTexture;

     public StubOesTextureProducer(
-        EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width,
-        int height) {
+        EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width, int height) {
       eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
       surfaceTexture.setDefaultBufferSize(width, height);
       eglBase.createSurface(surfaceTexture);
@@ -232,8 +232,8 @@ public final class GlRectDrawerTest extends ActivityTestCase {
       // Upload RGB data to texture.
       GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
       GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
-      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
-          HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
+          GLES20.GL_UNSIGNED_BYTE, rgbPlane);
       // Draw the RGB data onto the SurfaceTexture.
       drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
           0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
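The rewrapped glTexImage2D calls above move only the line breaks; the argument order is unchanged. For readers untangling the long argument lists, a hedged sketch that names each parameter (the wrapper class and comments are illustrative, not from the source):

import android.opengl.GLES20;
import java.nio.ByteBuffer;

final class TextureUploadSketch {
  // Uploads a tightly packed RGB plane to the currently bound 2D texture,
  // mirroring the calls in the test above.
  static void uploadRgb(ByteBuffer rgbPlane, int width, int height) {
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0,
        /* internalformat= */ GLES20.GL_RGB, width, height, /* border= */ 0,
        /* format= */ GLES20.GL_RGB, /* type= */ GLES20.GL_UNSIGNED_BYTE, rgbPlane);
  }
}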


@@ -28,8 +28,7 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
   @SmallTest
   public static void testInitializeUsingByteBuffer() {
     if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
-      Log.i(TAG,
-          "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
       return;
     }
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
@@ -46,9 +45,8 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
     }
     EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
-    assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
-        eglBase.getEglBaseContext()));
+    assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480,
+        300, 30, eglBase.getEglBaseContext()));
     encoder.release();
     eglBase.release();
   }
@@ -61,13 +59,11 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
     }
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
     assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
-        null));
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null));
     encoder.release();
     EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
-    assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
-        eglBase.getEglBaseContext()));
+    assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480,
+        300, 30, eglBase.getEglBaseContext()));
     encoder.release();
     eglBase.release();
   }
@@ -137,11 +133,10 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();

-    assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30,
-        eglOesBase.getEglBaseContext()));
-    assertTrue(encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(),
-        presentationTs));
+    assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width,
+        height, 300, 30, eglOesBase.getEglBaseContext()));
+    assertTrue(
+        encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(), presentationTs));
     GlUtil.checkNoGLES2Error("encodeTexture");

     // It should be Ok to delete the texture after calling encodeTexture.
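A condensed sketch of the encoder lifecycle these tests exercise, using only calls visible in this diff. Assumptions: the sketch lives in package org.webrtc (the methods are package-visible there, as in the test), and the 300/30 arguments are the bitrate in kbps and the frame rate, matching the test values; byte-buffer mode appears to be selected by passing a null EGL context.

package org.webrtc;

final class EncoderSmokeSketch {
  // Mirrors the init/release pairing asserted by the tests above.
  static void initAndRelease() {
    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
    boolean ok = encoder.initEncode(
        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null);
    if (ok) {
      encoder.release(); // every successful initEncode() is paired with a release()
    }
  }
}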


@@ -37,8 +37,7 @@ public class NetworkMonitorTest extends ActivityTestCase {
   /**
    * Listens for alerts fired by the NetworkMonitor when network status changes.
    */
-  private static class NetworkMonitorTestObserver
-      implements NetworkMonitor.NetworkObserver {
+  private static class NetworkMonitorTestObserver implements NetworkMonitor.NetworkObserver {
     private boolean receivedNotification = false;

     @Override
@@ -118,7 +117,6 @@ public class NetworkMonitorTest extends ActivityTestCase {
   // A dummy NetworkMonitorAutoDetect.Observer.
   private static class TestNetworkMonitorAutoDetectObserver
       implements NetworkMonitorAutoDetect.Observer {
-
     @Override
     public void onConnectionTypeChanged(ConnectionType newConnectionType) {}
@@ -165,8 +163,7 @@ public class NetworkMonitorTest extends ActivityTestCase {
   }

   private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
-    final NetworkMonitorAutoDetect.NetworkState networkState =
-        receiver.getCurrentNetworkState();
+    final NetworkMonitorAutoDetect.NetworkState networkState = receiver.getCurrentNetworkState();
     return receiver.getConnectionType(networkState);
   }


@@ -45,8 +45,7 @@ public class PeerConnectionTest extends ActivityTestCase {
   private static class ObserverExpectations implements PeerConnection.Observer,
                                                        VideoRenderer.Callbacks,
-                                                       DataChannel.Observer,
-                                                       StatsObserver {
+                                                       DataChannel.Observer, StatsObserver {
     private final String name;
     private int expectedIceCandidates = 0;
     private int expectedErrors = 0;
@@ -54,32 +53,24 @@ public class PeerConnectionTest extends ActivityTestCase {
     private int expectedWidth = 0;
     private int expectedHeight = 0;
     private int expectedFramesDelivered = 0;
-    private LinkedList<SignalingState> expectedSignalingChanges =
-        new LinkedList<SignalingState>();
+    private LinkedList<SignalingState> expectedSignalingChanges = new LinkedList<SignalingState>();
     private LinkedList<IceConnectionState> expectedIceConnectionChanges =
         new LinkedList<IceConnectionState>();
     private LinkedList<IceGatheringState> expectedIceGatheringChanges =
         new LinkedList<IceGatheringState>();
-    private LinkedList<String> expectedAddStreamLabels =
-        new LinkedList<String>();
-    private LinkedList<String> expectedRemoveStreamLabels =
-        new LinkedList<String>();
-    private final LinkedList<IceCandidate> gotIceCandidates =
-        new LinkedList<IceCandidate>();
+    private LinkedList<String> expectedAddStreamLabels = new LinkedList<String>();
+    private LinkedList<String> expectedRemoveStreamLabels = new LinkedList<String>();
+    private final LinkedList<IceCandidate> gotIceCandidates = new LinkedList<IceCandidate>();
     private Map<MediaStream, WeakReference<VideoRenderer>> renderers =
         new IdentityHashMap<MediaStream, WeakReference<VideoRenderer>>();
     private DataChannel dataChannel;
-    private LinkedList<DataChannel.Buffer> expectedBuffers =
-        new LinkedList<DataChannel.Buffer>();
+    private LinkedList<DataChannel.Buffer> expectedBuffers = new LinkedList<DataChannel.Buffer>();
     private LinkedList<DataChannel.State> expectedStateChanges =
         new LinkedList<DataChannel.State>();
-    private LinkedList<String> expectedRemoteDataChannelLabels =
-        new LinkedList<String>();
+    private LinkedList<String> expectedRemoteDataChannelLabels = new LinkedList<String>();
     private int expectedStatsCallbacks = 0;
-    private LinkedList<StatsReport[]> gotStatsReports =
-        new LinkedList<StatsReport[]>();
-    private final HashSet<MediaStream> gotRemoteStreams =
-        new HashSet<MediaStream>();
+    private LinkedList<StatsReport[]> gotStatsReports = new LinkedList<StatsReport[]>();
+    private final HashSet<MediaStream> gotRemoteStreams = new HashSet<MediaStream>();

     public ObserverExpectations(String name) {
       this.name = name;
@@ -140,14 +131,12 @@ public class PeerConnectionTest extends ActivityTestCase {
       assertEquals(expectedSignalingChanges.removeFirst(), newState);
     }

-    public synchronized void expectIceConnectionChange(
-        IceConnectionState newState) {
+    public synchronized void expectIceConnectionChange(IceConnectionState newState) {
       expectedIceConnectionChanges.add(newState);
     }

     @Override
-    public synchronized void onIceConnectionChange(
-        IceConnectionState newState) {
+    public synchronized void onIceConnectionChange(IceConnectionState newState) {
       // TODO(bemasc): remove once delivery of ICECompleted is reliable
       // (https://code.google.com/p/webrtc/issues/detail?id=3021).
       if (newState.equals(IceConnectionState.COMPLETED)) {
@@ -167,8 +156,7 @@ public class PeerConnectionTest extends ActivityTestCase {
       System.out.println(name + "Got an ice connection receiving change " + receiving);
     }

-    public synchronized void expectIceGatheringChange(
-        IceGatheringState newState) {
+    public synchronized void expectIceGatheringChange(IceGatheringState newState) {
       expectedIceGatheringChanges.add(newState);
     }
@@ -198,8 +186,7 @@ public class PeerConnectionTest extends ActivityTestCase {
       assertEquals("audio", stream.audioTracks.get(0).kind());
       VideoRenderer renderer = createVideoRenderer(this);
       stream.videoTracks.get(0).addRenderer(renderer);
-      assertNull(renderers.put(
-          stream, new WeakReference<VideoRenderer>(renderer)));
+      assertNull(renderers.put(stream, new WeakReference<VideoRenderer>(renderer)));
       gotRemoteStreams.add(stream);
     }
@@ -224,8 +211,7 @@ public class PeerConnectionTest extends ActivityTestCase {
     @Override
     public synchronized void onDataChannel(DataChannel remoteDataChannel) {
-      assertEquals(expectedRemoteDataChannelLabels.removeFirst(),
-          remoteDataChannel.label());
+      assertEquals(expectedRemoteDataChannelLabels.removeFirst(), remoteDataChannel.label());
       setDataChannel(remoteDataChannel);
       assertEquals(DataChannel.State.CONNECTING, dataChannel.state());
     }
@@ -239,10 +225,8 @@ public class PeerConnectionTest extends ActivityTestCase {
       assertTrue(--expectedRenegotiations >= 0);
     }

-    public synchronized void expectMessage(ByteBuffer expectedBuffer,
-        boolean expectedBinary) {
-      expectedBuffers.add(
-          new DataChannel.Buffer(expectedBuffer, expectedBinary));
+    public synchronized void expectMessage(ByteBuffer expectedBuffer, boolean expectedBinary) {
+      expectedBuffers.add(new DataChannel.Buffer(expectedBuffer, expectedBinary));
     }

     @Override
@@ -299,12 +283,12 @@ public class PeerConnectionTest extends ActivityTestCase {
             "expectedSignalingChanges: " + expectedSignalingChanges.size());
       }
       if (expectedIceConnectionChanges.size() != 0) {
-        stillWaitingForExpectations.add("expectedIceConnectionChanges: " +
-            expectedIceConnectionChanges.size());
+        stillWaitingForExpectations.add(
+            "expectedIceConnectionChanges: " + expectedIceConnectionChanges.size());
       }
       if (expectedIceGatheringChanges.size() != 0) {
-        stillWaitingForExpectations.add("expectedIceGatheringChanges: " +
-            expectedIceGatheringChanges.size());
+        stillWaitingForExpectations.add(
+            "expectedIceGatheringChanges: " + expectedIceGatheringChanges.size());
       }
       if (expectedAddStreamLabels.size() != 0) {
         stillWaitingForExpectations.add(
@@ -315,24 +299,20 @@ public class PeerConnectionTest extends ActivityTestCase {
             "expectedRemoveStreamLabels: " + expectedRemoveStreamLabels.size());
       }
       if (expectedFramesDelivered > 0) {
-        stillWaitingForExpectations.add(
-            "expectedFramesDelivered: " + expectedFramesDelivered);
+        stillWaitingForExpectations.add("expectedFramesDelivered: " + expectedFramesDelivered);
       }
       if (!expectedBuffers.isEmpty()) {
-        stillWaitingForExpectations.add(
-            "expectedBuffers: " + expectedBuffers.size());
+        stillWaitingForExpectations.add("expectedBuffers: " + expectedBuffers.size());
       }
       if (!expectedStateChanges.isEmpty()) {
-        stillWaitingForExpectations.add(
-            "expectedStateChanges: " + expectedStateChanges.size());
+        stillWaitingForExpectations.add("expectedStateChanges: " + expectedStateChanges.size());
       }
       if (!expectedRemoteDataChannelLabels.isEmpty()) {
-        stillWaitingForExpectations.add("expectedRemoteDataChannelLabels: " +
-            expectedRemoteDataChannelLabels.size());
+        stillWaitingForExpectations.add(
+            "expectedRemoteDataChannelLabels: " + expectedRemoteDataChannelLabels.size());
       }
       if (expectedStatsCallbacks != 0) {
-        stillWaitingForExpectations.add(
-            "expectedStatsCallbacks: " + expectedStatsCallbacks);
+        stillWaitingForExpectations.add("expectedStatsCallbacks: " + expectedStatsCallbacks);
       }
       return stillWaitingForExpectations;
     }
@@ -352,11 +332,8 @@ public class PeerConnectionTest extends ActivityTestCase {
       TreeSet<String> stillWaitingForExpectations = unsatisfiedExpectations();
       while (!stillWaitingForExpectations.isEmpty()) {
         if (!stillWaitingForExpectations.equals(prev)) {
-          System.out.println(
-              name + " still waiting at\n " +
-              (new Throwable()).getStackTrace()[1] +
-              "\n for: " +
-              Arrays.toString(stillWaitingForExpectations.toArray()));
+          System.out.println(name + " still waiting at\n " + (new Throwable()).getStackTrace()[1]
+              + "\n for: " + Arrays.toString(stillWaitingForExpectations.toArray()));
         }
         if (endTime < System.currentTimeMillis()) {
           System.out.println(name + " timed out waiting for: "
@@ -372,8 +349,8 @@ public class PeerConnectionTest extends ActivityTestCase {
         stillWaitingForExpectations = unsatisfiedExpectations();
       }
       if (prev == null) {
-        System.out.println(name + " didn't need to wait at\n " +
-            (new Throwable()).getStackTrace()[1]);
+        System.out.println(
+            name + " didn't need to wait at\n " + (new Throwable()).getStackTrace()[1]);
       }
       return true;
     }
@@ -464,21 +441,17 @@ public class PeerConnectionTest extends ActivityTestCase {
   static int videoWindowsMapped = -1;

-  private static VideoRenderer createVideoRenderer(
-      VideoRenderer.Callbacks videoCallbacks) {
+  private static VideoRenderer createVideoRenderer(VideoRenderer.Callbacks videoCallbacks) {
     return new VideoRenderer(videoCallbacks);
   }

   // Return a weak reference to test that ownership is correctly held by
   // PeerConnection, not by test code.
-  private static WeakReference<MediaStream> addTracksToPC(
-      PeerConnectionFactory factory, PeerConnection pc,
-      VideoSource videoSource,
-      String streamLabel, String videoTrackId, String audioTrackId,
-      VideoRenderer.Callbacks videoCallbacks) {
+  private static WeakReference<MediaStream> addTracksToPC(PeerConnectionFactory factory,
+      PeerConnection pc, VideoSource videoSource, String streamLabel, String videoTrackId,
+      String audioTrackId, VideoRenderer.Callbacks videoCallbacks) {
     MediaStream lMS = factory.createLocalMediaStream(streamLabel);
-    VideoTrack videoTrack =
-        factory.createVideoTrack(videoTrackId, videoSource);
+    VideoTrack videoTrack = factory.createVideoTrack(videoTrackId, videoSource);
     assertNotNull(videoTrack);
     VideoRenderer videoRenderer = createVideoRenderer(videoCallbacks);
     assertNotNull(videoRenderer);
@@ -487,8 +460,8 @@ public class PeerConnectionTest extends ActivityTestCase {
     // Just for fun, let's remove and re-add the track.
     lMS.removeTrack(videoTrack);
     lMS.addTrack(videoTrack);
-    lMS.addTrack(factory.createAudioTrack(
-        audioTrackId, factory.createAudioSource(new MediaConstraints())));
+    lMS.addTrack(
+        factory.createAudioTrack(audioTrackId, factory.createAudioSource(new MediaConstraints())));
     pc.addStream(lMS);
     return new WeakReference<MediaStream>(lMS);
   }
@@ -544,25 +517,20 @@ public class PeerConnectionTest extends ActivityTestCase {
     //     Logging.Severity.LS_SENSITIVE);
     MediaConstraints pcConstraints = new MediaConstraints();
-    pcConstraints.mandatory.add(
-        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
+    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));

-    LinkedList<PeerConnection.IceServer> iceServers =
-        new LinkedList<PeerConnection.IceServer>();
-    iceServers.add(new PeerConnection.IceServer(
-        "stun:stun.l.google.com:19302"));
-    iceServers.add(new PeerConnection.IceServer(
-        "turn:fake.example.com", "fakeUsername", "fakePassword"));
+    LinkedList<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
+    iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));
+    iceServers.add(
+        new PeerConnection.IceServer("turn:fake.example.com", "fakeUsername", "fakePassword"));

-    ObserverExpectations offeringExpectations =
-        new ObserverExpectations("PCTest:offerer");
-    PeerConnection offeringPC = factory.createPeerConnection(
-        iceServers, pcConstraints, offeringExpectations);
+    ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+    PeerConnection offeringPC =
+        factory.createPeerConnection(iceServers, pcConstraints, offeringExpectations);
     assertNotNull(offeringPC);

-    ObserverExpectations answeringExpectations =
-        new ObserverExpectations("PCTest:answerer");
-    PeerConnection answeringPC = factory.createPeerConnection(
-        iceServers, pcConstraints, answeringExpectations);
+    ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
+    PeerConnection answeringPC =
+        factory.createPeerConnection(iceServers, pcConstraints, answeringExpectations);
     assertNotNull(answeringPC);

     // We want to use the same camera for offerer & answerer, so create it here
@@ -574,14 +542,12 @@ public class PeerConnectionTest extends ActivityTestCase {
     videoCapturer.startCapture(640, 480, 30);

     offeringExpectations.expectRenegotiationNeeded();
-    WeakReference<MediaStream> oLMS = addTracksToPC(
-        factory, offeringPC, videoSource, "offeredMediaStream",
-        "offeredVideoTrack", "offeredAudioTrack",
-        new ExpectedResolutionSetter(answeringExpectations));
+    WeakReference<MediaStream> oLMS =
+        addTracksToPC(factory, offeringPC, videoSource, "offeredMediaStream", "offeredVideoTrack",
+            "offeredAudioTrack", new ExpectedResolutionSetter(answeringExpectations));

     offeringExpectations.expectRenegotiationNeeded();
-    DataChannel offeringDC = offeringPC.createDataChannel(
-        "offeringDC", new DataChannel.Init());
+    DataChannel offeringDC = offeringPC.createDataChannel("offeringDC", new DataChannel.Init());

     assertEquals("offeringDC", offeringDC.label());
     offeringExpectations.setDataChannel(offeringDC);
@@ -593,22 +559,19 @@ public class PeerConnectionTest extends ActivityTestCase {
     assertFalse(offerSdp.description.isEmpty());

     sdpLatch = new SdpObserverLatch();
-    answeringExpectations.expectSignalingChange(
-        SignalingState.HAVE_REMOTE_OFFER);
+    answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
     answeringExpectations.expectAddStream("offeredMediaStream");
     // SCTP DataChannels are announced via OPEN messages over the established
     // connection (not via SDP), so answeringExpectations can only register
     // expecting the channel during ICE, below.
     answeringPC.setRemoteDescription(sdpLatch, offerSdp);
-    assertEquals(
-        PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+    assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
     assertTrue(sdpLatch.await());
     assertNull(sdpLatch.getSdp());

     answeringExpectations.expectRenegotiationNeeded();
-    WeakReference<MediaStream> aLMS = addTracksToPC(
-        factory, answeringPC, videoSource, "answeredMediaStream",
-        "answeredVideoTrack", "answeredAudioTrack",
-        new ExpectedResolutionSetter(offeringExpectations));
+    WeakReference<MediaStream> aLMS = addTracksToPC(factory, answeringPC, videoSource,
+        "answeredMediaStream", "answeredVideoTrack", "answeredAudioTrack",
+        new ExpectedResolutionSetter(offeringExpectations));

     sdpLatch = new SdpObserverLatch();
@@ -639,19 +602,15 @@ public class PeerConnectionTest extends ActivityTestCase {
     offeringExpectations.expectSignalingChange(SignalingState.STABLE);
     offeringExpectations.expectAddStream("answeredMediaStream");

-    offeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CHECKING);
-    offeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CONNECTED);
+    offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+    offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
     // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
     // (https://code.google.com/p/webrtc/issues/detail?id=3021).
     //
     // offeringExpectations.expectIceConnectionChange(
     //     IceConnectionState.COMPLETED);
-    answeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CHECKING);
-    answeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CONNECTED);
+    answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+    answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);

     offeringPC.setRemoteDescription(sdpLatch, answerSdp);
     assertTrue(sdpLatch.await());
@@ -667,7 +626,6 @@ public class PeerConnectionTest extends ActivityTestCase {
     assertEquals(answeringPC.getSenders().size(), 2);
     assertEquals(answeringPC.getReceivers().size(), 2);

-
     // Wait for at least some frames to be delivered at each end (number
     // chosen arbitrarily).
     offeringExpectations.expectFramesDelivered(10);
@@ -693,10 +651,8 @@ public class PeerConnectionTest extends ActivityTestCase {
     assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
     assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));

-    assertEquals(
-        PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
-    assertEquals(
-        PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+    assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+    assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());

     // Set a bitrate limit for the outgoing video stream for the offerer.
     RtpSender videoSender = null;
@@ -721,8 +677,8 @@ public class PeerConnectionTest extends ActivityTestCase {
     // Test send & receive UTF-8 text.
     answeringExpectations.expectMessage(
         ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
-    DataChannel.Buffer buffer = new DataChannel.Buffer(
-        ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+    DataChannel.Buffer buffer =
+        new DataChannel.Buffer(ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
     assertTrue(offeringExpectations.dataChannel.send(buffer));
     assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
@@ -735,8 +691,7 @@ public class PeerConnectionTest extends ActivityTestCase {
     expectedBinaryMessage.flip();
     offeringExpectations.expectMessage(expectedBinaryMessage, true);
     assertTrue(answeringExpectations.dataChannel.send(
-        new DataChannel.Buffer(
-            ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 }), true)));
+        new DataChannel.Buffer(ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5}), true)));
     assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));

     offeringExpectations.expectStateChange(DataChannel.State.CLOSING);
@@ -768,24 +723,19 @@ public class PeerConnectionTest extends ActivityTestCase {
     PeerConnectionFactory factory = new PeerConnectionFactory(options);

     MediaConstraints pcConstraints = new MediaConstraints();
-    pcConstraints.mandatory.add(
-        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
+    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));

-    LinkedList<PeerConnection.IceServer> iceServers =
-        new LinkedList<PeerConnection.IceServer>();
-    iceServers.add(new PeerConnection.IceServer(
-        "stun:stun.l.google.com:19302"));
+    LinkedList<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
+    iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));

-    ObserverExpectations offeringExpectations =
-        new ObserverExpectations("PCTest:offerer");
-    PeerConnection offeringPC = factory.createPeerConnection(
-        iceServers, pcConstraints, offeringExpectations);
+    ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+    PeerConnection offeringPC =
+        factory.createPeerConnection(iceServers, pcConstraints, offeringExpectations);
     assertNotNull(offeringPC);

-    ObserverExpectations answeringExpectations =
-        new ObserverExpectations("PCTest:answerer");
-    PeerConnection answeringPC = factory.createPeerConnection(
-        iceServers, pcConstraints, answeringExpectations);
+    ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
+    PeerConnection answeringPC =
+        factory.createPeerConnection(iceServers, pcConstraints, answeringExpectations);
     assertNotNull(answeringPC);

     // We want to use the same camera for offerer & answerer, so create it here
@@ -798,10 +748,9 @@ public class PeerConnectionTest extends ActivityTestCase {
     // Add offerer media stream.
     offeringExpectations.expectRenegotiationNeeded();
-    WeakReference<MediaStream> oLMS = addTracksToPC(
-        factory, offeringPC, videoSource, "offeredMediaStream",
-        "offeredVideoTrack", "offeredAudioTrack",
-        new ExpectedResolutionSetter(answeringExpectations));
+    WeakReference<MediaStream> oLMS =
+        addTracksToPC(factory, offeringPC, videoSource, "offeredMediaStream", "offeredVideoTrack",
+            "offeredAudioTrack", new ExpectedResolutionSetter(answeringExpectations));

     // Create offer.
     SdpObserverLatch sdpLatch = new SdpObserverLatch();
@@ -822,8 +771,7 @@ public class PeerConnectionTest extends ActivityTestCase {
     // Set remote description for answerer.
     sdpLatch = new SdpObserverLatch();
-    answeringExpectations.expectSignalingChange(
-        SignalingState.HAVE_REMOTE_OFFER);
+    answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
     answeringExpectations.expectAddStream("offeredMediaStream");
     answeringPC.setRemoteDescription(sdpLatch, offerSdp);
     assertTrue(sdpLatch.await());
@@ -831,9 +779,8 @@ public class PeerConnectionTest extends ActivityTestCase {
     // Add answerer media stream.
     answeringExpectations.expectRenegotiationNeeded();
-    WeakReference<MediaStream> aLMS = addTracksToPC(
-        factory, answeringPC, videoSource, "answeredMediaStream",
-        "answeredVideoTrack", "answeredAudioTrack",
+    WeakReference<MediaStream> aLMS = addTracksToPC(factory, answeringPC, videoSource,
+        "answeredMediaStream", "answeredVideoTrack", "answeredAudioTrack",
         new ExpectedResolutionSetter(offeringExpectations));

     // Create answer.
@@ -858,19 +805,15 @@ public class PeerConnectionTest extends ActivityTestCase {
     offeringExpectations.expectSignalingChange(SignalingState.STABLE);
     offeringExpectations.expectAddStream("answeredMediaStream");

-    offeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CHECKING);
-    offeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CONNECTED);
+    offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+    offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
     // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
     // (https://code.google.com/p/webrtc/issues/detail?id=3021).
     //
     // offeringExpectations.expectIceConnectionChange(
     //     IceConnectionState.COMPLETED);
-    answeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CHECKING);
-    answeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CONNECTED);
+    answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+    answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);

     offeringPC.setRemoteDescription(sdpLatch, answerSdp);
     assertTrue(sdpLatch.await());
@@ -897,10 +840,8 @@ public class PeerConnectionTest extends ActivityTestCase {
     assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
     assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));

-    assertEquals(
-        PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
-    assertEquals(
-        PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+    assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+    assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());

     // Now do another negotiation, removing the video track from one peer.
     // This previously caused a crash on pc.dispose().
@@ -927,8 +868,7 @@ public class PeerConnectionTest extends ActivityTestCase {
     // Set remote description for answerer.
     sdpLatch = new SdpObserverLatch();
-    answeringExpectations.expectSignalingChange(
-        SignalingState.HAVE_REMOTE_OFFER);
+    answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
     answeringPC.setRemoteDescription(sdpLatch, offerSdp);
     assertTrue(sdpLatch.await());
     assertNull(sdpLatch.getSdp());
@@ -1041,8 +981,7 @@ public class PeerConnectionTest extends ActivityTestCase {
     assertTrue(info.samples.size() > 0);
   }

-  private static void shutdownPC(
-      PeerConnection pc, ObserverExpectations expectations) {
+  private static void shutdownPC(PeerConnection pc, ObserverExpectations expectations) {
     if (expectations.dataChannel != null) {
       expectations.dataChannel.unregisterObserver();
       expectations.dataChannel.dispose();
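For orientation in this long test: a condensed sketch of the offer/answer handshake it drives, using only the PeerConnection APIs that appear in this diff (createOffer, createAnswer, setLocalDescription, setRemoteDescription with an SdpObserver). Passing the two SessionDescriptions in as parameters is a simplification; in the test they are produced through the SdpObserverLatch callbacks, and all error handling is elided.

package org.webrtc;

final class HandshakeSketch {
  // Drives one offer/answer round between two already-created PeerConnections.
  static void negotiate(PeerConnection offerer, PeerConnection answerer, SdpObserver observer,
      MediaConstraints constraints, SessionDescription offer, SessionDescription answer) {
    offerer.createOffer(observer, constraints); // 'offer' arrives via the observer
    offerer.setLocalDescription(observer, offer);
    answerer.setRemoteDescription(observer, offer);
    answerer.createAnswer(observer, constraints); // 'answer' arrives via the observer
    answerer.setLocalDescription(observer, answer);
    offerer.setRemoteDescription(observer, answer);
  }
}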


@@ -95,11 +95,13 @@ public final class RendererCommonTest extends ActivityTestCase {
     // Assert:
     // u' = u.
     // v' = v.
+    // clang-format off
     MoreAsserts.assertEquals(new double[] {
         1, 0, 0, 0,
         0, 1, 0, 0,
         0, 0, 1, 0,
         0, 0, 0, 1}, round(layoutMatrix));
+    // clang-format on
   }

   @SmallTest
@@ -108,11 +110,13 @@ public final class RendererCommonTest extends ActivityTestCase {
     // Assert:
     // u' = 1 - u.
     // v' = v.
+    // clang-format off
     MoreAsserts.assertEquals(new double[] {
        -1, 0, 0, 0,
         0, 1, 0, 0,
         0, 0, 1, 0,
         1, 0, 0, 1}, round(layoutMatrix));
+    // clang-format on
   }

   @SmallTest
@@ -123,22 +127,26 @@ public final class RendererCommonTest extends ActivityTestCase {
     // Assert:
     // u' = 0.25 + 0.5 u.
     // v' = v.
+    // clang-format off
     MoreAsserts.assertEquals(new double[] {
         0.5, 0, 0, 0,
           0, 1, 0, 0,
           0, 0, 1, 0,
        0.25, 0, 0, 1}, round(layoutMatrix));
+    // clang-format on
   }

   @SmallTest
   public static void testRotateTextureMatrixDefault() {
     // Test that rotation with 0 degrees returns an identical matrix.
+    // clang-format off
     final float[] matrix = new float[] {
         1, 2, 3, 4,
         5, 6, 7, 8,
         9, 0, 1, 2,
         3, 4, 5, 6
     };
+    // clang-format on
     final float rotatedMatrix[] = rotateTextureMatrix(matrix, 0);
     MoreAsserts.assertEquals(round(matrix), round(rotatedMatrix));
   }
@@ -149,11 +157,13 @@ public final class RendererCommonTest extends ActivityTestCase {
     // Assert:
     // u' = 1 - v.
     // v' = u.
+    // clang-format off
     MoreAsserts.assertEquals(new double[] {
         0, 1, 0, 0,
        -1, 0, 0, 0,
         0, 0, 1, 0,
         1, 0, 0, 1}, round(samplingMatrix));
+    // clang-format on
   }

   @SmallTest
@@ -162,10 +172,12 @@ public final class RendererCommonTest extends ActivityTestCase {
     // Assert:
     // u' = 1 - u.
     // v' = 1 - v.
+    // clang-format off
     MoreAsserts.assertEquals(new double[] {
        -1,  0, 0, 0,
         0, -1, 0, 0,
         0,  0, 1, 0,
         1,  1, 0, 1}, round(samplingMatrix));
+    // clang-format on
   }
 }
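This file contains the one non-mechanical change in the commit: clang-format would collapse the hand-aligned 4x4 matrix literals, so they are fenced with the formatter's off/on markers. The same pattern works for any manually laid out literal; a minimal illustration (the class and field names are made up for the example):

final class MatrixConstants {
  // clang-format off
  static final float[] IDENTITY = {
      1, 0, 0, 0,
      0, 1, 0, 0,
      0, 0, 1, 0,
      0, 0, 0, 1};
  // clang-format on
}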


@ -110,8 +110,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface. // |surfaceTextureHelper| as the target EGLSurface.
final EglBase eglOesBase = final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture()); eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglOesBase.surfaceWidth(), width); assertEquals(eglOesBase.surfaceWidth(), width);
assertEquals(eglOesBase.surfaceHeight(), height); assertEquals(eglOesBase.surfaceHeight(), height);
@ -131,8 +130,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Wait for an OES texture to arrive and draw it onto the pixel buffer. // Wait for an OES texture to arrive and draw it onto the pixel buffer.
listener.waitForNewFrame(); listener.waitForNewFrame();
eglBase.makeCurrent(); eglBase.makeCurrent();
drawer.drawOes(listener.oesTextureId, listener.transformMatrix, width, height, drawer.drawOes(
0, 0, width, height); listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
surfaceTextureHelper.returnTextureFrame(); surfaceTextureHelper.returnTextureFrame();
@ -178,8 +177,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface. // |surfaceTextureHelper| as the target EGLSurface.
final EglBase eglOesBase = final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture()); eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglOesBase.surfaceWidth(), width); assertEquals(eglOesBase.surfaceWidth(), width);
assertEquals(eglOesBase.surfaceHeight(), height); assertEquals(eglOesBase.surfaceHeight(), height);
@ -203,8 +201,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Draw the pending texture frame onto the pixel buffer. // Draw the pending texture frame onto the pixel buffer.
eglBase.makeCurrent(); eglBase.makeCurrent();
final GlRectDrawer drawer = new GlRectDrawer(); final GlRectDrawer drawer = new GlRectDrawer();
drawer.drawOes(listener.oesTextureId, listener.transformMatrix, width, height, drawer.drawOes(
0, 0, width, height); listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
drawer.release(); drawer.release();
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9. // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
@ -231,8 +229,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
@MediumTest @MediumTest
   public static void testDispose() throws InterruptedException {
     // Create SurfaceTextureHelper and listener.
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
     // Create EglBase with the SurfaceTexture as target EGLSurface.
@@ -267,8 +265,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
    */
   @SmallTest
   public static void testDisposeImmediately() {
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     surfaceTextureHelper.dispose();
   }
@@ -279,8 +277,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
   @MediumTest
   public static void testStopListening() throws InterruptedException {
     // Create SurfaceTextureHelper and listener.
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
     // Create EglBase with the SurfaceTexture as target EGLSurface.
@@ -315,8 +313,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
    */
   @SmallTest
   public static void testStopListeningImmediately() throws InterruptedException {
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
     surfaceTextureHelper.stopListening();
@@ -329,8 +327,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
    */
   @SmallTest
   public static void testStopListeningImmediatelyOnHandlerThread() throws InterruptedException {
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     final MockTextureListener listener = new MockTextureListener();
     final CountDownLatch stopListeningBarrier = new CountDownLatch(1);
@@ -353,7 +351,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
     // Wait until handler thread is idle to try to catch late startListening() call.
     final CountDownLatch barrier = new CountDownLatch(1);
     surfaceTextureHelper.getHandler().post(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         barrier.countDown();
       }
     });
@@ -371,8 +370,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
   @MediumTest
   public static void testRestartListeningWithNewListener() throws InterruptedException {
     // Create SurfaceTextureHelper and listener.
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     final MockTextureListener listener1 = new MockTextureListener();
     surfaceTextureHelper.startListening(listener1);
     // Create EglBase with the SurfaceTexture as target EGLSurface.
@@ -467,8 +466,8 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
     // 368 UUUUUUUU VVVVVVVV
     // 384 buffer end
     ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3 / 2);
-    surfaceTextureHelper.textureToYUV(buffer, width, height, width,
-        listener.oesTextureId, listener.transformMatrix);
+    surfaceTextureHelper.textureToYUV(
+        buffer, width, height, width, listener.oesTextureId, listener.transformMatrix);
     surfaceTextureHelper.returnTextureFrame();
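Note: the width * height * 3 / 2 allocation above follows from the I420 layout that this test's byte-offset comments describe (one full-resolution Y plane plus two quarter-resolution chroma planes). A minimal sketch of that arithmetic for the 16x16 frame implied by the "384 buffer end" comment:

    // I420 sizing: Y plane is width*height bytes; U and V are width*height/4 each.
    int width = 16, height = 16;
    int ySize = width * height;     // 256 bytes of luma
    int uvSize = ySize / 4;         // 64 bytes per chroma plane
    int total = ySize + 2 * uvSize; // 384 bytes == width * height * 3 / 2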


@@ -24,8 +24,7 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
    * List with all possible scaling types.
    */
   private static final List<RendererCommon.ScalingType> scalingTypes = Arrays.asList(
-      RendererCommon.ScalingType.SCALE_ASPECT_FIT,
-      RendererCommon.ScalingType.SCALE_ASPECT_FILL,
+      RendererCommon.ScalingType.SCALE_ASPECT_FIT, RendererCommon.ScalingType.SCALE_ASPECT_FILL,
       RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);

   /**
@@ -50,22 +49,19 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
   /**
    * Assert onMeasure() with given parameters will result in expected measured size.
    */
-  private static void assertMeasuredSize(
-      SurfaceViewRenderer surfaceViewRenderer, RendererCommon.ScalingType scalingType,
-      String frameDimensions,
-      int expectedWidth, int expectedHeight,
-      int widthSpec, int heightSpec) {
+  private static void assertMeasuredSize(SurfaceViewRenderer surfaceViewRenderer,
+      RendererCommon.ScalingType scalingType, String frameDimensions, int expectedWidth,
+      int expectedHeight, int widthSpec, int heightSpec) {
     surfaceViewRenderer.setScalingType(scalingType);
     surfaceViewRenderer.onMeasure(widthSpec, heightSpec);
     final int measuredWidth = surfaceViewRenderer.getMeasuredWidth();
     final int measuredHeight = surfaceViewRenderer.getMeasuredHeight();
     if (measuredWidth != expectedWidth || measuredHeight != expectedHeight) {
-      fail("onMeasure("
-          + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec) + ")"
-          + " with scaling type " + scalingType
-          + " and frame: " + frameDimensions
-          + " expected measured size " + expectedWidth + "x" + expectedHeight
-          + ", but was " + measuredWidth + "x" + measuredHeight);
+      fail("onMeasure(" + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec)
+          + ")"
+          + " with scaling type " + scalingType + " and frame: " + frameDimensions
+          + " expected measured size " + expectedWidth + "x" + expectedHeight + ", but was "
+          + measuredWidth + "x" + measuredHeight);
     }
   }
@@ -82,10 +78,9 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
     for (RendererCommon.ScalingType scalingType : scalingTypes) {
       for (int measureSpecMode : measureSpecModes) {
         final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            0, 0, zeroMeasureSize, zeroMeasureSize);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            1280, 720,
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
+            zeroMeasureSize);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
             MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
             MeasureSpec.makeMeasureSpec(720, measureSpecMode));
       }
@@ -96,10 +91,9 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
     for (RendererCommon.ScalingType scalingType : scalingTypes) {
       for (int measureSpecMode : measureSpecModes) {
         final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            0, 0, zeroMeasureSize, zeroMeasureSize);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            1280, 720,
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
+            zeroMeasureSize);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
             MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
             MeasureSpec.makeMeasureSpec(720, measureSpecMode));
       }
@@ -163,25 +157,23 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
     for (RendererCommon.ScalingType scalingType : scalingTypes) {
       for (int measureSpecMode : measureSpecModes) {
         final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            0, 0, zeroMeasureSize, zeroMeasureSize);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0,
+            zeroMeasureSize, zeroMeasureSize);
       }
     }

     // Test perfect fit.
     for (RendererCommon.ScalingType scalingType : scalingTypes) {
       for (int measureSpecMode : measureSpecModes) {
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            rotatedWidth, rotatedHeight,
-            MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, rotatedWidth,
+            rotatedHeight, MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
             MeasureSpec.makeMeasureSpec(rotatedHeight, measureSpecMode));
       }
     }

     // Force spec size with different aspect ratio than frame aspect ratio.
     for (RendererCommon.ScalingType scalingType : scalingTypes) {
-      assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-          720, 1280,
+      assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 720, 1280,
           MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY),
           MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY));
     }
@@ -194,8 +186,8 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
       for (RendererCommon.ScalingType scalingType : scalingTypes) {
         final Point expectedSize =
             RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            expectedSize.x, expectedSize.y, widthSpec, heightSpec);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
+            expectedSize.y, widthSpec, heightSpec);
       }
     }
     {
@@ -206,8 +198,8 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
       for (RendererCommon.ScalingType scalingType : scalingTypes) {
         final Point expectedSize =
             RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            expectedSize.x, expectedSize.y, widthSpec, heightSpec);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
+            expectedSize.y, widthSpec, heightSpec);
       }
     }
     {
@@ -215,8 +207,8 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
       final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
       final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY);
       for (RendererCommon.ScalingType scalingType : scalingTypes) {
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            720, 1280, widthSpec, heightSpec);
+        assertMeasuredSize(
+            surfaceViewRenderer, scalingType, frameDimensions, 720, 1280, widthSpec, heightSpec);
       }
     }
   }


@@ -57,12 +57,10 @@ public class Logging {
     TraceLevel(int level) {
       this.level = level;
     }
-  };
+  }

   // Keep in sync with webrtc/base/logging.h:LoggingSeverity.
-  public enum Severity {
-    LS_SENSITIVE, LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE
-  };
+  public enum Severity { LS_SENSITIVE, LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE }

   public static void enableLogThreads() {
     if (!nativeLibLoaded) {
@@ -74,8 +72,8 @@ public class Logging {
   public static void enableLogTimeStamps() {
     if (!nativeLibLoaded) {
-      fallbackLogger.log(Level.WARNING,
-          "Cannot enable log timestamps because native lib not loaded.");
+      fallbackLogger.log(
+          Level.WARNING, "Cannot enable log timestamps because native lib not loaded.");
       return;
     }
     nativeEnableLogTimeStamps();
@@ -84,8 +82,7 @@ public class Logging {
   // Enable tracing to |path| of messages of |levels|.
   // On Android, use "logcat:" for |path| to send output there.
   // Note: this function controls the output of the WEBRTC_TRACE() macros.
-  public static synchronized void enableTracing(
-      String path, EnumSet<TraceLevel> levels) {
+  public static synchronized void enableTracing(String path, EnumSet<TraceLevel> levels) {
     if (!nativeLibLoaded) {
       fallbackLogger.log(Level.WARNING, "Cannot enable tracing because native lib not loaded.");
       return;
@@ -178,8 +175,7 @@ public class Logging {
     return sw.toString();
   }

-  private static native void nativeEnableTracing(
-      String path, int nativeLevels);
+  private static native void nativeEnableTracing(String path, int nativeLevels);
   private static native void nativeEnableLogToDebugOutput(int nativeSeverity);
   private static native void nativeEnableLogThreads();
   private static native void nativeEnableLogTimeStamps();
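Note: the comments in the hunk above document the intended call pattern for enableTracing. A minimal usage sketch (illustrative only; it assumes the native library is already loaded and uses TraceLevel.TRACE_ALL, the catch-all member of the TraceLevel enum):

    // Route WEBRTC_TRACE() output to logcat at all trace levels.
    Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_ALL));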


@@ -42,9 +42,7 @@ public class ThreadUtils {
    * Utility interface to be used with executeUninterruptibly() to wait for blocking operations
    * to complete without getting interrupted..
    */
-  public interface BlockingOperation {
-    void run() throws InterruptedException;
-  }
+  public interface BlockingOperation { void run() throws InterruptedException; }

   /**
    * Utility method to make sure a blocking operation is executed to completion without getting
@@ -168,7 +166,8 @@ public class ThreadUtils {
     final CaughtException caughtException = new CaughtException();
     final CountDownLatch barrier = new CountDownLatch(1);
     handler.post(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         try {
           result.value = callable.call();
         } catch (Exception e) {
@@ -182,9 +181,8 @@ public class ThreadUtils {
     // stack trace for the waiting thread as well.
     if (caughtException.e != null) {
       final RuntimeException runtimeException = new RuntimeException(caughtException.e);
-      runtimeException.setStackTrace(concatStackTraces(
-          caughtException.e.getStackTrace(),
-          runtimeException.getStackTrace()));
+      runtimeException.setStackTrace(
+          concatStackTraces(caughtException.e.getStackTrace(), runtimeException.getStackTrace()));
       throw runtimeException;
     }
     return result.value;
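Note: BlockingOperation above is the callback type consumed by executeUninterruptibly(), whose docstring promises the operation runs to completion even if the waiting thread is interrupted. A minimal usage sketch, based on that documented contract rather than on anything shown in this diff:

    final CountDownLatch done = new CountDownLatch(1);
    ThreadUtils.executeUninterruptibly(new ThreadUtils.BlockingOperation() {
      @Override
      public void run() throws InterruptedException {
        done.await(); // Retried by executeUninterruptibly if an interrupt arrives.
      }
    });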


@@ -84,10 +84,8 @@ public class AppRTCAudioManager {
     // The proximity sensor should only be activated when there are exactly two
     // available audio devices.
-    if (audioDevices.size() == 2
-        && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
-        && audioDevices.contains(
-            AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
+    if (audioDevices.size() == 2 && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
+        && audioDevices.contains(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
       if (proximitySensor.sensorReportsNearState()) {
         // Sensor reports that a "handset is being held up to a person's ear",
         // or "something is covering the light sensor".
@@ -101,17 +99,14 @@ public class AppRTCAudioManager {
   }

   /** Construction */
-  static AppRTCAudioManager create(Context context,
-      Runnable deviceStateChangeListener) {
+  static AppRTCAudioManager create(Context context, Runnable deviceStateChangeListener) {
     return new AppRTCAudioManager(context, deviceStateChangeListener);
   }

-  private AppRTCAudioManager(Context context,
-      Runnable deviceStateChangeListener) {
+  private AppRTCAudioManager(Context context, Runnable deviceStateChangeListener) {
     apprtcContext = context;
     onStateChangeListener = deviceStateChangeListener;
-    audioManager = ((AudioManager) context.getSystemService(
-        Context.AUDIO_SERVICE));
+    audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
     SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
     useSpeakerphone = sharedPreferences.getString(context.getString(R.string.pref_speakerphone_key),
@@ -149,8 +144,8 @@ public class AppRTCAudioManager {
     savedIsMicrophoneMute = audioManager.isMicrophoneMute();

     // Request audio focus before making any device switch.
-    audioManager.requestAudioFocus(null, AudioManager.STREAM_VOICE_CALL,
-        AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
+    audioManager.requestAudioFocus(
+        null, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);

     // Start by setting MODE_IN_COMMUNICATION as default audio mode. It is
     // required to be in this mode when playout and/or recording starts for
@@ -250,13 +245,11 @@ public class AppRTCAudioManager {
       int state = intent.getIntExtra("state", STATE_UNPLUGGED);
       int microphone = intent.getIntExtra("microphone", HAS_NO_MIC);
       String name = intent.getStringExtra("name");
-      Log.d(TAG, "BroadcastReceiver.onReceive" + AppRTCUtils.getThreadInfo()
-          + ": "
-          + "a=" + intent.getAction()
-          + ", s=" + (state == STATE_UNPLUGGED ? "unplugged" : "plugged")
-          + ", m=" + (microphone == HAS_MIC ? "mic" : "no mic")
-          + ", n=" + name
-          + ", sb=" + isInitialStickyBroadcast());
+      Log.d(TAG, "BroadcastReceiver.onReceive" + AppRTCUtils.getThreadInfo() + ": "
+          + "a=" + intent.getAction() + ", s="
+          + (state == STATE_UNPLUGGED ? "unplugged" : "plugged") + ", m="
+          + (microphone == HAS_MIC ? "mic" : "no mic") + ", n=" + name + ", sb="
+          + isInitialStickyBroadcast());
       boolean hasWiredHeadset = (state == STATE_PLUGGED);
       switch (state) {
@@ -304,8 +297,7 @@ public class AppRTCAudioManager {
   /** Gets the current earpiece state. */
   private boolean hasEarpiece() {
-    return apprtcContext.getPackageManager().hasSystemFeature(
-        PackageManager.FEATURE_TELEPHONY);
+    return apprtcContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
   }

   /**
@@ -347,8 +339,8 @@ public class AppRTCAudioManager {
   /** Called each time a new audio device has been added or removed. */
   private void onAudioManagerChangedState() {
-    Log.d(TAG, "onAudioManagerChangedState: devices=" + audioDevices
-        + ", selected=" + selectedAudioDevice);
+    Log.d(TAG, "onAudioManagerChangedState: devices=" + audioDevices + ", selected="
+        + selectedAudioDevice);

     // Enable the proximity sensor if there are two available audio devices
     // in the list. Given the current implementation, we know that the choice


@@ -20,7 +20,6 @@ import java.util.List;
  * AppRTCClient is the interface representing an AppRTC client.
  */
 public interface AppRTCClient {
-
   /**
    * Struct holding the connection parameters of an AppRTC room.
    */
@@ -28,8 +27,7 @@ public interface AppRTCClient {
     public final String roomUrl;
     public final String roomId;
     public final boolean loopback;
-    public RoomConnectionParameters(
-        String roomUrl, String roomId, boolean loopback) {
+    public RoomConnectionParameters(String roomUrl, String roomId, boolean loopback) {
       this.roomUrl = roomUrl;
       this.roomId = roomId;
       this.loopback = loopback;
@@ -80,11 +78,9 @@ public interface AppRTCClient {
     public final SessionDescription offerSdp;
     public final List<IceCandidate> iceCandidates;

-    public SignalingParameters(
-        List<PeerConnection.IceServer> iceServers,
-        boolean initiator, String clientId,
-        String wssUrl, String wssPostUrl,
-        SessionDescription offerSdp, List<IceCandidate> iceCandidates) {
+    public SignalingParameters(List<PeerConnection.IceServer> iceServers, boolean initiator,
+        String clientId, String wssUrl, String wssPostUrl, SessionDescription offerSdp,
+        List<IceCandidate> iceCandidates) {
       this.iceServers = iceServers;
       this.initiator = initiator;
       this.clientId = clientId;


@@ -45,16 +45,14 @@ public class AppRTCProximitySensor implements SensorEventListener {
   private boolean lastStateReportIsNear = false;

   /** Construction */
-  static AppRTCProximitySensor create(Context context,
-      Runnable sensorStateListener) {
+  static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
     return new AppRTCProximitySensor(context, sensorStateListener);
   }

   private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
     Log.d(TAG, "AppRTCProximitySensor" + AppRTCUtils.getThreadInfo());
     onSensorStateListener = sensorStateListener;
-    sensorManager = ((SensorManager) context.getSystemService(
-        Context.SENSOR_SERVICE));
+    sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
   }

   /**
@@ -68,8 +66,7 @@ public class AppRTCProximitySensor implements SensorEventListener {
       // Proximity sensor is not supported on this device.
       return false;
     }
-    sensorManager.registerListener(
-        this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
+    sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
     return true;
   }
@@ -120,8 +117,8 @@ public class AppRTCProximitySensor implements SensorEventListener {
     }
     Log.d(TAG, "onSensorChanged" + AppRTCUtils.getThreadInfo() + ": "
-        + "accuracy=" + event.accuracy
-        + ", timestamp=" + event.timestamp + ", distance=" + event.values[0]);
+        + "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
+        + event.values[0]);
   }

   /**
@@ -168,5 +165,4 @@ public class AppRTCProximitySensor implements SensorEventListener {
     }
     Log.d(TAG, info.toString());
   }
 }


@@ -42,68 +42,41 @@ import org.webrtc.SurfaceViewRenderer;
  * Activity for peer connection call setup, call waiting
  * and call view.
  */
-public class CallActivity extends Activity
-    implements AppRTCClient.SignalingEvents,
-      PeerConnectionClient.PeerConnectionEvents,
-      CallFragment.OnCallEvents {
-
-  public static final String EXTRA_ROOMID =
-      "org.appspot.apprtc.ROOMID";
-  public static final String EXTRA_LOOPBACK =
-      "org.appspot.apprtc.LOOPBACK";
-  public static final String EXTRA_VIDEO_CALL =
-      "org.appspot.apprtc.VIDEO_CALL";
-  public static final String EXTRA_CAMERA2 =
-      "org.appspot.apprtc.CAMERA2";
-  public static final String EXTRA_VIDEO_WIDTH =
-      "org.appspot.apprtc.VIDEO_WIDTH";
-  public static final String EXTRA_VIDEO_HEIGHT =
-      "org.appspot.apprtc.VIDEO_HEIGHT";
-  public static final String EXTRA_VIDEO_FPS =
-      "org.appspot.apprtc.VIDEO_FPS";
+public class CallActivity extends Activity implements AppRTCClient.SignalingEvents,
+                                                      PeerConnectionClient.PeerConnectionEvents,
+                                                      CallFragment.OnCallEvents {
+  public static final String EXTRA_ROOMID = "org.appspot.apprtc.ROOMID";
+  public static final String EXTRA_LOOPBACK = "org.appspot.apprtc.LOOPBACK";
+  public static final String EXTRA_VIDEO_CALL = "org.appspot.apprtc.VIDEO_CALL";
+  public static final String EXTRA_CAMERA2 = "org.appspot.apprtc.CAMERA2";
+  public static final String EXTRA_VIDEO_WIDTH = "org.appspot.apprtc.VIDEO_WIDTH";
+  public static final String EXTRA_VIDEO_HEIGHT = "org.appspot.apprtc.VIDEO_HEIGHT";
+  public static final String EXTRA_VIDEO_FPS = "org.appspot.apprtc.VIDEO_FPS";
   public static final String EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED =
       "org.appsopt.apprtc.VIDEO_CAPTUREQUALITYSLIDER";
-  public static final String EXTRA_VIDEO_BITRATE =
-      "org.appspot.apprtc.VIDEO_BITRATE";
-  public static final String EXTRA_VIDEOCODEC =
-      "org.appspot.apprtc.VIDEOCODEC";
-  public static final String EXTRA_HWCODEC_ENABLED =
-      "org.appspot.apprtc.HWCODEC";
-  public static final String EXTRA_CAPTURETOTEXTURE_ENABLED =
-      "org.appspot.apprtc.CAPTURETOTEXTURE";
-  public static final String EXTRA_AUDIO_BITRATE =
-      "org.appspot.apprtc.AUDIO_BITRATE";
-  public static final String EXTRA_AUDIOCODEC =
-      "org.appspot.apprtc.AUDIOCODEC";
+  public static final String EXTRA_VIDEO_BITRATE = "org.appspot.apprtc.VIDEO_BITRATE";
+  public static final String EXTRA_VIDEOCODEC = "org.appspot.apprtc.VIDEOCODEC";
+  public static final String EXTRA_HWCODEC_ENABLED = "org.appspot.apprtc.HWCODEC";
+  public static final String EXTRA_CAPTURETOTEXTURE_ENABLED = "org.appspot.apprtc.CAPTURETOTEXTURE";
+  public static final String EXTRA_AUDIO_BITRATE = "org.appspot.apprtc.AUDIO_BITRATE";
+  public static final String EXTRA_AUDIOCODEC = "org.appspot.apprtc.AUDIOCODEC";
   public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
       "org.appspot.apprtc.NOAUDIOPROCESSING";
-  public static final String EXTRA_AECDUMP_ENABLED =
-      "org.appspot.apprtc.AECDUMP";
-  public static final String EXTRA_OPENSLES_ENABLED =
-      "org.appspot.apprtc.OPENSLES";
-  public static final String EXTRA_DISABLE_BUILT_IN_AEC =
-      "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
-  public static final String EXTRA_DISABLE_BUILT_IN_AGC =
-      "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
-  public static final String EXTRA_DISABLE_BUILT_IN_NS =
-      "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
-  public static final String EXTRA_ENABLE_LEVEL_CONTROL =
-      "org.appspot.apprtc.ENABLE_LEVEL_CONTROL";
-  public static final String EXTRA_DISPLAY_HUD =
-      "org.appspot.apprtc.DISPLAY_HUD";
+  public static final String EXTRA_AECDUMP_ENABLED = "org.appspot.apprtc.AECDUMP";
+  public static final String EXTRA_OPENSLES_ENABLED = "org.appspot.apprtc.OPENSLES";
+  public static final String EXTRA_DISABLE_BUILT_IN_AEC = "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
+  public static final String EXTRA_DISABLE_BUILT_IN_AGC = "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
+  public static final String EXTRA_DISABLE_BUILT_IN_NS = "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
+  public static final String EXTRA_ENABLE_LEVEL_CONTROL = "org.appspot.apprtc.ENABLE_LEVEL_CONTROL";
+  public static final String EXTRA_DISPLAY_HUD = "org.appspot.apprtc.DISPLAY_HUD";
   public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
-  public static final String EXTRA_CMDLINE =
-      "org.appspot.apprtc.CMDLINE";
-  public static final String EXTRA_RUNTIME =
-      "org.appspot.apprtc.RUNTIME";
+  public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
+  public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
   private static final String TAG = "CallRTCClient";

   // List of mandatory application permissions.
-  private static final String[] MANDATORY_PERMISSIONS = {
-    "android.permission.MODIFY_AUDIO_SETTINGS",
-    "android.permission.RECORD_AUDIO",
-    "android.permission.INTERNET"
-  };
+  private static final String[] MANDATORY_PERMISSIONS = {"android.permission.MODIFY_AUDIO_SETTINGS",
+      "android.permission.RECORD_AUDIO", "android.permission.INTERNET"};

   // Peer connection statistics callback period in ms.
   private static final int STAT_CALLBACK_PERIOD = 1000;
@@ -152,22 +125,16 @@ public class CallActivity extends Activity
   @Override
   public void onCreate(Bundle savedInstanceState) {
     super.onCreate(savedInstanceState);
-    Thread.setDefaultUncaughtExceptionHandler(
-        new UnhandledExceptionHandler(this));
+    Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));

     // Set window styles for fullscreen-window size. Needs to be done before
     // adding content.
     requestWindowFeature(Window.FEATURE_NO_TITLE);
-    getWindow().addFlags(
-        LayoutParams.FLAG_FULLSCREEN
-        | LayoutParams.FLAG_KEEP_SCREEN_ON
-        | LayoutParams.FLAG_DISMISS_KEYGUARD
-        | LayoutParams.FLAG_SHOW_WHEN_LOCKED
-        | LayoutParams.FLAG_TURN_SCREEN_ON);
-    getWindow().getDecorView().setSystemUiVisibility(
-        View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
-        | View.SYSTEM_UI_FLAG_FULLSCREEN
-        | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
+    getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
+        | LayoutParams.FLAG_DISMISS_KEYGUARD | LayoutParams.FLAG_SHOW_WHEN_LOCKED
+        | LayoutParams.FLAG_TURN_SCREEN_ON);
+    getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
+        | View.SYSTEM_UI_FLAG_FULLSCREEN | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
     setContentView(R.layout.activity_call);

     iceConnected = false;
@@ -232,23 +199,17 @@ public class CallActivity extends Activity
     boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
     boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);

-    boolean useCamera2 = Camera2Enumerator.isSupported(this)
-        && intent.getBooleanExtra(EXTRA_CAMERA2, true);
+    boolean useCamera2 =
+        Camera2Enumerator.isSupported(this) && intent.getBooleanExtra(EXTRA_CAMERA2, true);

-    peerConnectionParameters = new PeerConnectionParameters(
-        intent.getBooleanExtra(EXTRA_VIDEO_CALL, true),
-        loopback,
-        tracing,
-        useCamera2,
-        intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
-        intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0),
-        intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
-        intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0),
-        intent.getStringExtra(EXTRA_VIDEOCODEC),
-        intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
-        intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
-        intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0),
-        intent.getStringExtra(EXTRA_AUDIOCODEC),
-        intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
-        intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
-        intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
+    peerConnectionParameters =
+        new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
+            tracing, useCamera2, intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
+            intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0), intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
+            intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
+            intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
+            intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
+            intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
+            intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
+            intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
+            intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
@@ -268,8 +229,7 @@ public class CallActivity extends Activity
       appRtcClient = new DirectRTCClient(this);
     }
     // Create connection parameters.
-    roomConnectionParameters = new RoomConnectionParameters(
-        roomUri.toString(), roomId, loopback);
+    roomConnectionParameters = new RoomConnectionParameters(roomUri.toString(), roomId, loopback);

     // Create CPU monitor
     cpuMonitor = new CpuMonitor(this);
@@ -419,8 +379,7 @@ public class CallActivity extends Activity
     callStartedTimeMs = System.currentTimeMillis();

     // Start room connection.
-    logAndToast(getString(R.string.connecting_to,
-        roomConnectionParameters.roomUrl));
+    logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
     appRtcClient.connectToRoom(roomConnectionParameters);

     // Create and audio manager that will take care of audio routing,
@@ -432,8 +391,7 @@ public class CallActivity extends Activity
           public void run() {
             onAudioManagerChangedState();
           }
-        }
-    );
+        });

     // Store existing audio settings and change audio mode to
     // MODE_IN_COMMUNICATION for best possible VoIP performance.
     Log.d(TAG, "Initializing the audio manager...");
@@ -499,13 +457,16 @@ public class CallActivity extends Activity
           .setTitle(getText(R.string.channel_error_title))
           .setMessage(errorMessage)
          .setCancelable(false)
-          .setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
-            @Override
-            public void onClick(DialogInterface dialog, int id) {
-              dialog.cancel();
-              disconnect();
-            }
-          }).create().show();
+          .setNeutralButton(R.string.ok,
+              new DialogInterface.OnClickListener() {
+                @Override
+                public void onClick(DialogInterface dialog, int id) {
+                  dialog.cancel();
+                  disconnect();
+                }
+              })
+          .create()
+          .show();
     }
   }
@@ -539,8 +500,8 @@ public class CallActivity extends Activity
     signalingParameters = params;
     logAndToast("Creating peer connection, delay=" + delta + "ms");
-    peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(),
-        localRender, remoteRender, signalingParameters);
+    peerConnectionClient.createPeerConnection(
+        rootEglBase.getEglBaseContext(), localRender, remoteRender, signalingParameters);

     if (signalingParameters.initiator) {
       logAndToast("Creating OFFER...");
@@ -716,8 +677,7 @@ public class CallActivity extends Activity
   }

   @Override
-  public void onPeerConnectionClosed() {
-  }
+  public void onPeerConnectionClosed() {}

   @Override
   public void onPeerConnectionStatsReady(final StatsReport[] reports) {


@@ -50,26 +50,18 @@ public class CallFragment extends Fragment {
   }

   @Override
-  public View onCreateView(LayoutInflater inflater, ViewGroup container,
-      Bundle savedInstanceState) {
-    controlView =
-        inflater.inflate(R.layout.fragment_call, container, false);
+  public View onCreateView(
+      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
+    controlView = inflater.inflate(R.layout.fragment_call, container, false);

     // Create UI controls.
-    contactView =
-        (TextView) controlView.findViewById(R.id.contact_name_call);
-    disconnectButton =
-        (ImageButton) controlView.findViewById(R.id.button_call_disconnect);
-    cameraSwitchButton =
-        (ImageButton) controlView.findViewById(R.id.button_call_switch_camera);
-    videoScalingButton =
-        (ImageButton) controlView.findViewById(R.id.button_call_scaling_mode);
-    toggleMuteButton =
-        (ImageButton) controlView.findViewById(R.id.button_call_toggle_mic);
-    captureFormatText =
-        (TextView) controlView.findViewById(R.id.capture_format_text_call);
-    captureFormatSlider =
-        (SeekBar) controlView.findViewById(R.id.capture_format_slider_call);
+    contactView = (TextView) controlView.findViewById(R.id.contact_name_call);
+    disconnectButton = (ImageButton) controlView.findViewById(R.id.button_call_disconnect);
+    cameraSwitchButton = (ImageButton) controlView.findViewById(R.id.button_call_switch_camera);
+    videoScalingButton = (ImageButton) controlView.findViewById(R.id.button_call_scaling_mode);
+    toggleMuteButton = (ImageButton) controlView.findViewById(R.id.button_call_toggle_mic);
+    captureFormatText = (TextView) controlView.findViewById(R.id.capture_format_text_call);
+    captureFormatSlider = (SeekBar) controlView.findViewById(R.id.capture_format_slider_call);

     // Add buttons click events.
     disconnectButton.setOnClickListener(new View.OnClickListener() {
@@ -90,12 +82,10 @@ public class CallFragment extends Fragment {
       @Override
       public void onClick(View view) {
         if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
-          videoScalingButton.setBackgroundResource(
-              R.drawable.ic_action_full_screen);
+          videoScalingButton.setBackgroundResource(R.drawable.ic_action_full_screen);
           scalingType = ScalingType.SCALE_ASPECT_FIT;
         } else {
-          videoScalingButton.setBackgroundResource(
-              R.drawable.ic_action_return_from_full_screen);
+          videoScalingButton.setBackgroundResource(R.drawable.ic_action_return_from_full_screen);
           scalingType = ScalingType.SCALE_ASPECT_FILL;
         }
         callEvents.onVideoScalingSwitch(scalingType);
@@ -144,5 +134,4 @@ public class CallFragment extends Fragment {
     super.onAttach(activity);
     callEvents = (OnCallEvents) activity;
   }
 }


@@ -24,13 +24,10 @@ import java.util.List;
  * Control capture format based on a seekbar listener.
  */
 public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener {
-  private final List<CaptureFormat> formats = Arrays.asList(
-      new CaptureFormat(1280, 720, 0, 30000),
-      new CaptureFormat(960, 540, 0, 30000),
-      new CaptureFormat(640, 480, 0, 30000),
-      new CaptureFormat(480, 360, 0, 30000),
-      new CaptureFormat(320, 240, 0, 30000),
-      new CaptureFormat(256, 144, 0, 30000));
+  private final List<CaptureFormat> formats =
+      Arrays.asList(new CaptureFormat(1280, 720, 0, 30000), new CaptureFormat(960, 540, 0, 30000),
+          new CaptureFormat(640, 480, 0, 30000), new CaptureFormat(480, 360, 0, 30000),
+          new CaptureFormat(320, 240, 0, 30000), new CaptureFormat(256, 144, 0, 30000));
   // Prioritize framerate below this threshold and resolution above the threshold.
   private static final int FRAMERATE_THRESHOLD = 15;
   private TextView captureFormatText;
@@ -76,8 +73,8 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
     // Extract max bandwidth (in millipixels / second).
     long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
     for (CaptureFormat format : formats) {
-      maxCaptureBandwidth = Math.max(maxCaptureBandwidth,
-          (long) format.width * format.height * format.framerate.max);
+      maxCaptureBandwidth =
+          Math.max(maxCaptureBandwidth, (long) format.width * format.height * format.framerate.max);
     }

     // Fraction between 0 and 1.
@@ -97,8 +94,7 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
   }

   @Override
-  public void onStartTrackingTouch(SeekBar seekBar) {
-  }
+  public void onStartTrackingTouch(SeekBar seekBar) {}

   @Override
   public void onStopTrackingTouch(SeekBar seekBar) {
@@ -107,8 +103,8 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
   // Return the highest frame rate possible based on bandwidth and format.
   private int calculateFramerate(double bandwidth, CaptureFormat format) {
-    return (int) Math.round(Math.min(format.framerate.max,
-        (int) Math.round(bandwidth / (format.width * format.height))) / 1000.0);
+    return (int) Math.round(
+        Math.min(format.framerate.max, (int) Math.round(bandwidth / (format.width * format.height)))
+        / 1000.0);
   }
 }
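Note: in the hunk above, CaptureFormat framerates are stored as fps * 1000 and bandwidth is measured in millipixels per second, so calculateFramerate caps the bandwidth-implied rate at the format maximum and then divides by 1000. A standalone restatement of the same arithmetic (the helper name and numbers below are illustrative, not part of the diff):

    static int framerateFor(double bandwidth, int width, int height, int maxFramerate) {
      // Cap the bandwidth-implied rate (fps * 1000) at the format maximum,
      // then convert to whole frames per second.
      return (int) Math.round(
          Math.min(maxFramerate, (int) Math.round(bandwidth / (width * height))) / 1000.0);
    }
    // Example: framerateFor(4.608e9, 640, 480, 30000) == 15,
    // since 4.608e9 / (640 * 480) = 15000 and min(30000, 15000) / 1000 = 15.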


@@ -118,11 +118,9 @@ public class ConnectActivity extends Activity {
     setContentView(R.layout.activity_connect);

     roomEditText = (EditText) findViewById(R.id.room_edittext);
-    roomEditText.setOnEditorActionListener(
-        new TextView.OnEditorActionListener() {
+    roomEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
       @Override
-      public boolean onEditorAction(
-          TextView textView, int i, KeyEvent keyEvent) {
+      public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) {
         if (i == EditorInfo.IME_ACTION_DONE) {
           addFavoriteButton.performClick();
           return true;
@@ -143,12 +141,9 @@ public class ConnectActivity extends Activity {
     // If an implicit VIEW intent is launching the app, go directly to that URL.
     final Intent intent = getIntent();
-    if ("android.intent.action.VIEW".equals(intent.getAction())
-        && !commandLineRun) {
-      boolean loopback = intent.getBooleanExtra(
-          CallActivity.EXTRA_LOOPBACK, false);
-      int runTimeMs = intent.getIntExtra(
-          CallActivity.EXTRA_RUNTIME, 0);
+    if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
+      boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
+      int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
       String room = sharedPref.getString(keyprefRoom, "");
       connectToRoom(room, true, loopback, runTimeMs);
     }
@@ -230,8 +225,7 @@ public class ConnectActivity extends Activity {
         Log.e(TAG, "Failed to load room list: " + e.toString());
       }
     }
-    adapter = new ArrayAdapter<String>(
-        this, android.R.layout.simple_list_item_1, roomList);
+    adapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, roomList);
     roomListView.setAdapter(adapter);
     if (adapter.getCount() > 0) {
       roomListView.requestFocus();
@@ -240,8 +234,7 @@ public class ConnectActivity extends Activity {
   }

   @Override
-  protected void onActivityResult(
-      int requestCode, int resultCode, Intent data) {
+  protected void onActivityResult(int requestCode, int resultCode, Intent data) {
     if (requestCode == CONNECTION_REQUEST && commandLineRun) {
       Log.d(TAG, "Return: " + resultCode);
       setResult(resultCode);
@@ -260,71 +253,63 @@ public class ConnectActivity extends Activity {
     }

     String roomUrl = sharedPref.getString(
-        keyprefRoomServerUrl,
-        getString(R.string.pref_room_server_url_default));
+        keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));

     // Video call enabled flag.
-    boolean videoCallEnabled = sharedPref.getBoolean(keyprefVideoCallEnabled,
-        Boolean.valueOf(getString(R.string.pref_videocall_default)));
+    boolean videoCallEnabled = sharedPref.getBoolean(
+        keyprefVideoCallEnabled, Boolean.valueOf(getString(R.string.pref_videocall_default)));

     // Use Camera2 option.
-    boolean useCamera2 = sharedPref.getBoolean(keyprefCamera2,
-        Boolean.valueOf(getString(R.string.pref_camera2_default)));
+    boolean useCamera2 = sharedPref.getBoolean(
+        keyprefCamera2, Boolean.valueOf(getString(R.string.pref_camera2_default)));

     // Get default codecs.
-    String videoCodec = sharedPref.getString(keyprefVideoCodec,
-        getString(R.string.pref_videocodec_default));
-    String audioCodec = sharedPref.getString(keyprefAudioCodec,
-        getString(R.string.pref_audiocodec_default));
+    String videoCodec =
+        sharedPref.getString(keyprefVideoCodec, getString(R.string.pref_videocodec_default));
+    String audioCodec =
+        sharedPref.getString(keyprefAudioCodec, getString(R.string.pref_audiocodec_default));

     // Check HW codec flag.
-    boolean hwCodec = sharedPref.getBoolean(keyprefHwCodecAcceleration,
-        Boolean.valueOf(getString(R.string.pref_hwcodec_default)));
+    boolean hwCodec = sharedPref.getBoolean(
+        keyprefHwCodecAcceleration, Boolean.valueOf(getString(R.string.pref_hwcodec_default)));

     // Check Capture to texture.
     boolean captureToTexture = sharedPref.getBoolean(keyprefCaptureToTexture,
         Boolean.valueOf(getString(R.string.pref_capturetotexture_default)));

     // Check Disable Audio Processing flag.
-    boolean noAudioProcessing = sharedPref.getBoolean(
-        keyprefNoAudioProcessingPipeline,
+    boolean noAudioProcessing = sharedPref.getBoolean(keyprefNoAudioProcessingPipeline,
         Boolean.valueOf(getString(R.string.pref_noaudioprocessing_default)));

     // Check Disable Audio Processing flag.
     boolean aecDump = sharedPref.getBoolean(
-        keyprefAecDump,
-        Boolean.valueOf(getString(R.string.pref_aecdump_default)));
+        keyprefAecDump, Boolean.valueOf(getString(R.string.pref_aecdump_default)));

     // Check OpenSL ES enabled flag.
     boolean useOpenSLES = sharedPref.getBoolean(
-        keyprefOpenSLES,
-        Boolean.valueOf(getString(R.string.pref_opensles_default)));
+        keyprefOpenSLES, Boolean.valueOf(getString(R.string.pref_opensles_default)));

     // Check Disable built-in AEC flag.
-    boolean disableBuiltInAEC = sharedPref.getBoolean(
-        keyprefDisableBuiltInAec,
+    boolean disableBuiltInAEC = sharedPref.getBoolean(keyprefDisableBuiltInAec,
         Boolean.valueOf(getString(R.string.pref_disable_built_in_aec_default)));

     // Check Disable built-in AGC flag.
-    boolean disableBuiltInAGC = sharedPref.getBoolean(
-        keyprefDisableBuiltInAgc,
+    boolean disableBuiltInAGC = sharedPref.getBoolean(keyprefDisableBuiltInAgc,
         Boolean.valueOf(getString(R.string.pref_disable_built_in_agc_default)));

     // Check Disable built-in NS flag.
-    boolean disableBuiltInNS = sharedPref.getBoolean(
-        keyprefDisableBuiltInNs,
+    boolean disableBuiltInNS = sharedPref.getBoolean(keyprefDisableBuiltInNs,
         Boolean.valueOf(getString(R.string.pref_disable_built_in_ns_default)));

     // Check Enable level control.
-    boolean enableLevelControl = sharedPref.getBoolean(
-        keyprefEnableLevelControl,
+    boolean enableLevelControl = sharedPref.getBoolean(keyprefEnableLevelControl,
         Boolean.valueOf(getString(R.string.pref_enable_level_control_key)));

     // Get video resolution from settings.
     int videoWidth = 0;
     int videoHeight = 0;
-    String resolution = sharedPref.getString(keyprefResolution,
-        getString(R.string.pref_resolution_default));
+    String resolution =
+        sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
     String[] dimensions = resolution.split("[ x]+");
     if (dimensions.length == 2) {
       try {
@@ -339,8 +324,7 @@ public class ConnectActivity extends Activity {
     // Get camera fps from settings.
     int cameraFps = 0;
-    String fps = sharedPref.getString(keyprefFps,
-        getString(R.string.pref_fps_default));
+    String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
     String[] fpsValues = fps.split("[ x]+");
     if (fpsValues.length == 2) {
       try {
@@ -356,28 +340,25 @@ public class ConnectActivity extends Activity {
     // Get video and audio start bitrate.
     int videoStartBitrate = 0;
-    String bitrateTypeDefault = getString(
-        R.string.pref_maxvideobitrate_default);
-    String bitrateType = sharedPref.getString(
-        keyprefVideoBitrateType, bitrateTypeDefault);
+    String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
+    String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
     if (!bitrateType.equals(bitrateTypeDefault)) {
-      String bitrateValue = sharedPref.getString(keyprefVideoBitrateValue,
-          getString(R.string.pref_maxvideobitratevalue_default));
+      String bitrateValue = sharedPref.getString(
+          keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
       videoStartBitrate = Integer.parseInt(bitrateValue);
     }

     int audioStartBitrate = 0;
     bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
-    bitrateType = sharedPref.getString(
-        keyprefAudioBitrateType, bitrateTypeDefault);
+    bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
     if (!bitrateType.equals(bitrateTypeDefault)) {
-      String bitrateValue = sharedPref.getString(keyprefAudioBitrateValue,
-          getString(R.string.pref_startaudiobitratevalue_default));
+      String bitrateValue = sharedPref.getString(
+          keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
       audioStartBitrate = Integer.parseInt(bitrateValue);
     }

     // Check statistics display option.
-    boolean displayHud = sharedPref.getBoolean(keyprefDisplayHud,
-        Boolean.valueOf(getString(R.string.pref_displayhud_default)));
+    boolean displayHud = sharedPref.getBoolean(
+        keyprefDisplayHud, Boolean.valueOf(getString(R.string.pref_displayhud_default)));

     boolean tracing = sharedPref.getBoolean(
         keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));
@@ -395,14 +376,12 @@ public class ConnectActivity extends Activity {
     intent.putExtra(CallActivity.EXTRA_VIDEO_WIDTH, videoWidth);
     intent.putExtra(CallActivity.EXTRA_VIDEO_HEIGHT, videoHeight);
     intent.putExtra(CallActivity.EXTRA_VIDEO_FPS, cameraFps);
-    intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
-        captureQualitySlider);
+    intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, captureQualitySlider);
     intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
     intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
     intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
     intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
-    intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED,
-        noAudioProcessing);
+    intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, noAudioProcessing);
     intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
     intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
     intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, disableBuiltInAEC);
@@ -429,16 +408,19 @@ public class ConnectActivity extends Activity {
         .setTitle(getText(R.string.invalid_url_title))
         .setMessage(getString(R.string.invalid_url_text, url))
         .setCancelable(false)
-        .setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
-          public void onClick(DialogInterface dialog, int id) {
-            dialog.cancel();
-          }
-        }).create().show();
+        .setNeutralButton(R.string.ok,
+            new DialogInterface.OnClickListener() {
+              public void onClick(DialogInterface dialog, int id) {
+                dialog.cancel();
+              }
+            })
+        .create()
+        .show();
     return false;
   }

-  private final AdapterView.OnItemClickListener
-      roomListClickListener = new AdapterView.OnItemClickListener() {
+  private final AdapterView.OnItemClickListener roomListClickListener =
+      new AdapterView.OnItemClickListener() {
         @Override
         public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
           String roomId = ((TextView) view).getText().toString();

View File

@@ -275,8 +275,8 @@ class CpuMonitor {
     int batteryLevel = 0;
     int batteryScale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, 100);
     if (batteryScale > 0) {
-      batteryLevel = (int) (
-          100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
+      batteryLevel =
+          (int) (100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
     }
     return batteryLevel;
   }
@@ -402,16 +402,20 @@ class CpuMonitor {
   private synchronized String getStatString() {
     StringBuilder stat = new StringBuilder();
     stat.append("CPU User: ")
-        .append(doubleToPercent(userCpuUsage.getCurrent())).append("/")
+        .append(doubleToPercent(userCpuUsage.getCurrent()))
+        .append("/")
         .append(doubleToPercent(userCpuUsage.getAverage()))
         .append(". System: ")
-        .append(doubleToPercent(systemCpuUsage.getCurrent())).append("/")
+        .append(doubleToPercent(systemCpuUsage.getCurrent()))
+        .append("/")
         .append(doubleToPercent(systemCpuUsage.getAverage()))
         .append(". Freq: ")
-        .append(doubleToPercent(frequencyScale.getCurrent())).append("/")
+        .append(doubleToPercent(frequencyScale.getCurrent()))
+        .append("/")
         .append(doubleToPercent(frequencyScale.getAverage()))
         .append(". Total usage: ")
-        .append(doubleToPercent(totalCpuUsage.getCurrent())).append("/")
+        .append(doubleToPercent(totalCpuUsage.getCurrent()))
+        .append("/")
         .append(doubleToPercent(totalCpuUsage.getAverage()))
         .append(". Cores: ")
         .append(actualCpusPresent);

View File

@@ -35,8 +35,7 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
   private static final int DEFAULT_PORT = 8888;
   // Regex pattern used for checking if room id looks like an IP.
-  static final Pattern IP_PATTERN = Pattern.compile(
-      "("
+  static final Pattern IP_PATTERN = Pattern.compile("("
       // IPv4
       + "((\\d+\\.){3}\\d+)|"
       // IPv6
@@ -50,17 +49,14 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
       + "localhost"
       + ")"
       // Optional port number
-      + "(:(\\d+))?"
-  );
+      + "(:(\\d+))?");
   private final ExecutorService executor;
   private final SignalingEvents events;
   private TCPChannelClient tcpClient;
   private RoomConnectionParameters connectionParameters;
-  private enum ConnectionState {
-    NEW, CONNECTED, CLOSED, ERROR
-  };
+  private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
   // All alterations of the room state should be done from inside the looper thread.
   private ConnectionState roomState;
@@ -265,13 +261,11 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
       events.onRemoteIceCandidatesRemoved(candidates);
     } else if (type.equals("answer")) {
       SessionDescription sdp = new SessionDescription(
-          SessionDescription.Type.fromCanonicalForm(type),
-          json.getString("sdp"));
+          SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
       events.onRemoteDescription(sdp);
     } else if (type.equals("offer")) {
       SessionDescription sdp = new SessionDescription(
-          SessionDescription.Type.fromCanonicalForm(type),
-          json.getString("sdp"));
+          SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
       SignalingParameters parameters = new SignalingParameters(
           // Ice servers are not needed for direct connections.
@@ -347,8 +341,7 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
   // Converts a JSON candidate to a Java object.
   private static IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
-    return new IceCandidate(json.getString("id"),
-        json.getInt("label"),
-        json.getString("candidate"));
+    return new IceCandidate(
+        json.getString("id"), json.getInt("label"), json.getString("candidate"));
   }
 }
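
Only fragments of IP_PATTERN are visible in the hunk above. The sketch below is a rough, self-contained illustration of how such a room-id check behaves, simplified to IPv4 and localhost (the class name and main() harness are invented for the example):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class RoomIdCheckSketch {
  // Simplified stand-in for DirectRTCClient.IP_PATTERN: IPv4 or localhost,
  // plus an optional ":port" suffix. The real pattern also covers IPv6.
  static final Pattern IP_PATTERN = Pattern.compile("(((\\d+\\.){3}\\d+)|localhost)(:(\\d+))?");

  public static void main(String[] args) {
    for (String roomId : new String[] {"192.168.0.1", "localhost:8888", "my-room"}) {
      Matcher matcher = IP_PATTERN.matcher(roomId);
      // A room id that looks like an IP selects the direct (serverless) connection path.
      System.out.println(roomId + " -> " + (matcher.matches() ? "direct" : "server"));
    }
  }
}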

View File

@@ -41,8 +41,8 @@ public class HudFragment extends Fragment {
   private CpuMonitor cpuMonitor;
   @Override
-  public View onCreateView(LayoutInflater inflater, ViewGroup container,
-      Bundle savedInstanceState) {
+  public View onCreateView(
+      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
     controlView = inflater.inflate(R.layout.fragment_hud, container, false);
     // Create UI controls.
@@ -57,8 +57,8 @@ public class HudFragment extends Fragment {
       @Override
       public void onClick(View view) {
         if (displayHud) {
-          int visibility = (hudViewBwe.getVisibility() == View.VISIBLE)
-              ? View.INVISIBLE : View.VISIBLE;
+          int visibility =
+              (hudViewBwe.getVisibility() == View.VISIBLE) ? View.INVISIBLE : View.VISIBLE;
           hudViewsSetProperties(visibility);
         }
       }
@@ -126,8 +126,7 @@ public class HudFragment extends Fragment {
     String actualBitrate = null;
     for (StatsReport report : reports) {
-      if (report.type.equals("ssrc") && report.id.contains("ssrc")
-          && report.id.contains("send")) {
+      if (report.type.equals("ssrc") && report.id.contains("ssrc") && report.id.contains("send")) {
         // Send video statistics.
         Map<String, String> reportMap = getReportMap(report);
         String trackId = reportMap.get("googTrackId");
@@ -195,9 +194,11 @@ public class HudFragment extends Fragment {
     if (cpuMonitor != null) {
       encoderStat.append("CPU%: ")
-          .append(cpuMonitor.getCpuUsageCurrent()).append("/")
+          .append(cpuMonitor.getCpuUsageCurrent())
+          .append("/")
           .append(cpuMonitor.getCpuUsageAverage())
-          .append(". Freq: ").append(cpuMonitor.getFrequencyScaleAverage());
+          .append(". Freq: ")
+          .append(cpuMonitor.getFrequencyScaleAverage());
     }
     encoderStatView.setText(encoderStat.toString());
   }

View File

@@ -74,8 +74,7 @@ public class PeerConnectionClient {
   private static final String VIDEO_CODEC_H264 = "H264";
   private static final String AUDIO_CODEC_OPUS = "opus";
   private static final String AUDIO_CODEC_ISAC = "ISAC";
-  private static final String VIDEO_CODEC_PARAM_START_BITRATE =
-      "x-google-start-bitrate";
+  private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
   private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
   private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
   private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
@@ -162,14 +161,12 @@ public class PeerConnectionClient {
     public final boolean disableBuiltInNS;
     public final boolean enableLevelControl;
-    public PeerConnectionParameters(
-        boolean videoCallEnabled, boolean loopback, boolean tracing, boolean useCamera2,
-        int videoWidth, int videoHeight, int videoFps,
-        int videoMaxBitrate, String videoCodec, boolean videoCodecHwAcceleration,
-        boolean captureToTexture, int audioStartBitrate, String audioCodec,
-        boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES,
-        boolean disableBuiltInAEC, boolean disableBuiltInAGC, boolean disableBuiltInNS,
-        boolean enableLevelControl) {
+    public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
+        boolean useCamera2, int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate,
+        String videoCodec, boolean videoCodecHwAcceleration, boolean captureToTexture,
+        int audioStartBitrate, String audioCodec, boolean noAudioProcessing, boolean aecDump,
+        boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
+        boolean disableBuiltInNS, boolean enableLevelControl) {
       this.videoCallEnabled = videoCallEnabled;
       this.useCamera2 = useCamera2;
       this.loopback = loopback;
@@ -255,10 +252,8 @@ public class PeerConnectionClient {
     this.options = options;
   }
-  public void createPeerConnectionFactory(
-      final Context context,
-      final PeerConnectionParameters peerConnectionParameters,
-      final PeerConnectionEvents events) {
+  public void createPeerConnectionFactory(final Context context,
+      final PeerConnectionParameters peerConnectionParameters, final PeerConnectionEvents events) {
     this.peerConnectionParameters = peerConnectionParameters;
     this.events = events;
     videoCallEnabled = peerConnectionParameters.videoCallEnabled;
@@ -289,10 +284,8 @@ public class PeerConnectionClient {
     });
   }
-  public void createPeerConnection(
-      final EglBase.Context renderEGLContext,
-      final VideoRenderer.Callbacks localRender,
-      final VideoRenderer.Callbacks remoteRender,
+  public void createPeerConnection(final EglBase.Context renderEGLContext,
+      final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
       final SignalingParameters signalingParameters) {
     if (peerConnectionParameters == null) {
       Log.e(TAG, "Creating peer connection without initializing factory.");
@@ -335,8 +328,8 @@ public class PeerConnectionClient {
           Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
           + "webrtc-trace.txt");
     }
-    Log.d(TAG, "Create peer connection factory. Use video: " +
-        peerConnectionParameters.videoCallEnabled);
+    Log.d(TAG,
+        "Create peer connection factory. Use video: " + peerConnectionParameters.videoCallEnabled);
     isError = false;
     // Initialize field trials.
@@ -391,8 +384,8 @@ public class PeerConnectionClient {
     }
     // Create peer connection factory.
-    if (!PeerConnectionFactory.initializeAndroidGlobals(context, true, true,
-        peerConnectionParameters.videoCodecHwAcceleration)) {
+    if (!PeerConnectionFactory.initializeAndroidGlobals(
+            context, true, true, peerConnectionParameters.videoCodecHwAcceleration)) {
       events.onPeerConnectionError("Failed to initializeAndroidGlobals");
     }
     if (options != null) {
@@ -448,30 +441,30 @@ public class PeerConnectionClient {
     // added for audio performance measurements
     if (peerConnectionParameters.noAudioProcessing) {
       Log.d(TAG, "Disabling audio processing");
-      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-          AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
-      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-          AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
-      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-          AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
-      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-          AUDIO_NOISE_SUPPRESSION_CONSTRAINT , "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
     }
     if (peerConnectionParameters.enableLevelControl) {
       Log.d(TAG, "Enabling level control.");
-      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-          AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
     }
     // Create SDP constraints.
     sdpMediaConstraints = new MediaConstraints();
-    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-        "OfferToReceiveAudio", "true"));
+    sdpMediaConstraints.mandatory.add(
+        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
     if (videoCallEnabled || peerConnectionParameters.loopback) {
-      sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-          "OfferToReceiveVideo", "true"));
+      sdpMediaConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
     } else {
-      sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-          "OfferToReceiveVideo", "false"));
+      sdpMediaConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
     }
   }
@@ -531,15 +524,12 @@ public class PeerConnectionClient {
     // Use ECDSA encryption.
     rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
-    peerConnection = factory.createPeerConnection(
-        rtcConfig, pcConstraints, pcObserver);
+    peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
     isInitiator = false;
     // Set default WebRTC tracing and INFO libjingle logging.
     // NOTE: this _must_ happen while |factory| is alive!
-    Logging.enableTracing(
-        "logcat:",
-        EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
+    Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
     Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
     mediaStream = factory.createLocalMediaStream("ARDAMS");
@@ -572,13 +562,11 @@ public class PeerConnectionClient {
     if (peerConnectionParameters.aecDump) {
       try {
-        aecDumpFileDescriptor = ParcelFileDescriptor.open(
-            new File(Environment.getExternalStorageDirectory().getPath()
-                + File.separator
-                + "Download/audio.aecdump"),
-            ParcelFileDescriptor.MODE_READ_WRITE |
-                ParcelFileDescriptor.MODE_CREATE |
-                ParcelFileDescriptor.MODE_TRUNCATE);
+        aecDumpFileDescriptor =
+            ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
+                                          + File.separator + "Download/audio.aecdump"),
+                ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
+                    | ParcelFileDescriptor.MODE_TRUNCATE);
         factory.startAecDump(aecDumpFileDescriptor.getFd(), -1);
       } catch (IOException e) {
         Log.e(TAG, "Can not open aecdump file", e);
@@ -773,12 +761,11 @@ public class PeerConnectionClient {
           sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
         }
         if (peerConnectionParameters.audioStartBitrate > 0) {
-          sdpDescription = setStartBitrate(AUDIO_CODEC_OPUS, false,
-              sdpDescription, peerConnectionParameters.audioStartBitrate);
+          sdpDescription = setStartBitrate(
+              AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate);
         }
         Log.d(TAG, "Set remote SDP.");
-        SessionDescription sdpRemote = new SessionDescription(
-            sdp.type, sdpDescription);
+        SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription);
         peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
       }
     });
@@ -792,7 +779,8 @@ public class PeerConnectionClient {
         Log.d(TAG, "Stop video source.");
         try {
           videoCapturer.stopCapture();
-        } catch (InterruptedException e) {}
+        } catch (InterruptedException e) {
+        }
         videoCapturerStopped = true;
       }
     }
@@ -833,9 +821,7 @@ public class PeerConnectionClient {
     for (RtpParameters.Encoding encoding : parameters.encodings) {
       // Null value means no limit.
-      encoding.maxBitrateBps = maxBitrateKbps == null
-          ? null
-          : maxBitrateKbps * BPS_IN_KBPS;
+      encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS;
     }
     if (!localVideoSender.setParameters(parameters)) {
       Log.e(TAG, "RtpSender.setParameters failed.");
@@ -887,8 +873,8 @@ public class PeerConnectionClient {
     }
   }
-  private static String setStartBitrate(String codec, boolean isVideoCodec,
-      String sdpDescription, int bitrateKbps) {
+  private static String setStartBitrate(
+      String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) {
     String[] lines = sdpDescription.split("\r\n");
     int rtpmapLineIndex = -1;
     boolean sdpFormatUpdated = false;
@@ -909,8 +895,7 @@ public class PeerConnectionClient {
       Log.w(TAG, "No rtpmap for " + codec + " codec");
       return sdpDescription;
     }
-    Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap
-        + " at " + lines[rtpmapLineIndex]);
+    Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]);
     // Check if a=fmtp string already exist in remote SDP for this codec and
     // update it with new bitrate parameter.
@@ -921,11 +906,9 @@ public class PeerConnectionClient {
       if (codecMatcher.matches()) {
         Log.d(TAG, "Found " + codec + " " + lines[i]);
         if (isVideoCodec) {
-          lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE
-              + "=" + bitrateKbps;
+          lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
         } else {
-          lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE
-              + "=" + (bitrateKbps * 1000);
+          lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
         }
         Log.d(TAG, "Update remote SDP line: " + lines[i]);
         sdpFormatUpdated = true;
@@ -940,22 +923,20 @@ public class PeerConnectionClient {
       if (!sdpFormatUpdated && i == rtpmapLineIndex) {
         String bitrateSet;
         if (isVideoCodec) {
-          bitrateSet = "a=fmtp:" + codecRtpMap + " "
-              + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
+          bitrateSet =
+              "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
         } else {
-          bitrateSet = "a=fmtp:" + codecRtpMap + " "
-              + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
+          bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "="
+              + (bitrateKbps * 1000);
         }
         Log.d(TAG, "Add remote SDP line: " + bitrateSet);
         newSdpDescription.append(bitrateSet).append("\r\n");
       }
     }
     return newSdpDescription.toString();
   }
-  private static String preferCodec(
-      String sdpDescription, String codec, boolean isAudio) {
+  private static String preferCodec(String sdpDescription, String codec, boolean isAudio) {
     String[] lines = sdpDescription.split("\r\n");
     int mLineIndex = -1;
     String codecRtpMap = null;
@@ -966,8 +947,7 @@ public class PeerConnectionClient {
     if (isAudio) {
       mediaDescription = "m=audio ";
     }
-    for (int i = 0; (i < lines.length)
-        && (mLineIndex == -1 || codecRtpMap == null); i++) {
+    for (int i = 0; (i < lines.length) && (mLineIndex == -1 || codecRtpMap == null); i++) {
      if (lines[i].startsWith(mediaDescription)) {
        mLineIndex = i;
        continue;
@@ -985,8 +965,7 @@ public class PeerConnectionClient {
       Log.w(TAG, "No rtpmap for " + codec);
       return sdpDescription;
     }
-    Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + ", prefer at "
-        + lines[mLineIndex]);
+    Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + ", prefer at " + lines[mLineIndex]);
     String[] origMLineParts = lines[mLineIndex].split(" ");
     if (origMLineParts.length > 3) {
       StringBuilder newMLine = new StringBuilder();
@@ -1025,8 +1004,8 @@ public class PeerConnectionClient {
   private void switchCameraInternal() {
     if (!videoCallEnabled || numberOfCameras < 2 || isError || videoCapturer == null) {
-      Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : "
-          + isError + ". Number of cameras: " + numberOfCameras);
+      Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError
+          + ". Number of cameras: " + numberOfCameras);
       return; // No video is sent or only one camera is available or error happened.
     }
     Log.d(TAG, "Switch camera");
@@ -1053,8 +1032,8 @@ public class PeerConnectionClient {
   private void changeCaptureFormatInternal(int width, int height, int framerate) {
     if (!videoCallEnabled || isError || videoCapturer == null) {
-      Log.e(TAG, "Failed to change capture format. Video: " + videoCallEnabled + ". Error : "
-          + isError);
+      Log.e(TAG,
+          "Failed to change capture format. Video: " + videoCallEnabled + ". Error : " + isError);
       return;
     }
     Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
@@ -1084,14 +1063,12 @@ public class PeerConnectionClient {
     }
     @Override
-    public void onSignalingChange(
-        PeerConnection.SignalingState newState) {
+    public void onSignalingChange(PeerConnection.SignalingState newState) {
       Log.d(TAG, "SignalingState: " + newState);
     }
     @Override
-    public void onIceConnectionChange(
-        final PeerConnection.IceConnectionState newState) {
+    public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) {
       executor.execute(new Runnable() {
         @Override
         public void run() {
@@ -1108,8 +1085,7 @@ public class PeerConnectionClient {
     }
     @Override
-    public void onIceGatheringChange(
-        PeerConnection.IceGatheringState newState) {
+    public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
       Log.d(TAG, "IceGatheringState: " + newState);
     }
@@ -1151,8 +1127,7 @@ public class PeerConnectionClient {
     @Override
     public void onDataChannel(final DataChannel dc) {
-      reportError("AppRTC doesn't use data channels, but got: " + dc.label()
-          + " anyway!");
+      reportError("AppRTC doesn't use data channels, but got: " + dc.label() + " anyway!");
     }
     @Override
@@ -1178,8 +1153,7 @@ public class PeerConnectionClient {
       if (videoCallEnabled) {
        sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
       }
-      final SessionDescription sdp = new SessionDescription(
-          origSdp.type, sdpDescription);
+      final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription);
       localSdp = sdp;
       executor.execute(new Runnable() {
         @Override
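
The setStartBitrate() hunks above show only the string-building lines; the surrounding loop and the rtpmap regex are elided from this diff. The toy sketch below illustrates the same idea under those assumptions (the SDP snippet, regex, class and helper names are all invented for the example): find the codec's rtpmap payload type, then append an a=fmtp line carrying x-google-start-bitrate.

public class SdpBitrateSketch {
  public static void main(String[] args) {
    String sdp = "m=video 9 UDP/TLS/RTP/SAVPF 100\r\n"
        + "a=rtpmap:100 VP8/90000\r\n";
    System.out.print(withStartBitrate(sdp, "VP8", 500));
  }

  // Append "a=fmtp:<payload type> x-google-start-bitrate=<kbps>" right after the
  // codec's a=rtpmap line; this is the same shape as the bitrateSet string above.
  static String withStartBitrate(String sdp, String codec, int bitrateKbps) {
    java.util.regex.Pattern rtpmap =
        java.util.regex.Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+$");
    StringBuilder out = new StringBuilder();
    for (String line : sdp.split("\r\n")) {
      out.append(line).append("\r\n");
      java.util.regex.Matcher m = rtpmap.matcher(line);
      if (m.matches()) {
        out.append("a=fmtp:").append(m.group(1))
            .append(" x-google-start-bitrate=").append(bitrateKbps).append("\r\n");
      }
    }
    return out.toString();
  }
}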

View File

@@ -54,8 +54,7 @@ public class PercentFrameLayout extends ViewGroup {
   protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
     final int width = getDefaultSize(Integer.MAX_VALUE, widthMeasureSpec);
     final int height = getDefaultSize(Integer.MAX_VALUE, heightMeasureSpec);
-    setMeasuredDimension(
-        MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
+    setMeasuredDimension(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
         MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
     final int childWidthMeasureSpec =

View File

@@ -58,8 +58,8 @@ public class RoomParametersFetcher {
     void onSignalingParametersError(final String description);
   }
-  public RoomParametersFetcher(String roomUrl, String roomMessage,
-      final RoomParametersFetcherEvents events) {
+  public RoomParametersFetcher(
+      String roomUrl, String roomMessage, final RoomParametersFetcherEvents events) {
     this.roomUrl = roomUrl;
     this.roomMessage = roomMessage;
     this.events = events;
@@ -67,9 +67,8 @@ public class RoomParametersFetcher {
   public void makeRequest() {
     Log.d(TAG, "Connecting to room: " + roomUrl);
-    httpConnection = new AsyncHttpURLConnection(
-        "POST", roomUrl, roomMessage,
-        new AsyncHttpEvents() {
+    httpConnection =
+        new AsyncHttpURLConnection("POST", roomUrl, roomMessage, new AsyncHttpEvents() {
          @Override
          public void onHttpError(String errorMessage) {
            Log.e(TAG, "Room connection error: " + errorMessage);
@@ -114,13 +113,10 @@ public class RoomParametersFetcher {
       Log.d(TAG, "GAE->C #" + i + " : " + messageString);
       if (messageType.equals("offer")) {
         offerSdp = new SessionDescription(
-            SessionDescription.Type.fromCanonicalForm(messageType),
-            message.getString("sdp"));
+            SessionDescription.Type.fromCanonicalForm(messageType), message.getString("sdp"));
       } else if (messageType.equals("candidate")) {
         IceCandidate candidate = new IceCandidate(
-            message.getString("id"),
-            message.getInt("label"),
-            message.getString("candidate"));
+            message.getString("id"), message.getInt("label"), message.getString("candidate"));
         iceCandidates.add(candidate);
       } else {
         Log.e(TAG, "Unknown message: " + messageString);
@@ -153,13 +149,10 @@ public class RoomParametersFetcher {
       }
       SignalingParameters params = new SignalingParameters(
-          iceServers, initiator,
-          clientId, wssUrl, wssPostUrl,
-          offerSdp, iceCandidates);
+          iceServers, initiator, clientId, wssUrl, wssPostUrl, offerSdp, iceCandidates);
       events.onSignalingParametersReady(params);
     } catch (JSONException e) {
-      events.onSignalingParametersError(
-          "Room JSON parsing error: " + e.toString());
+      events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
     } catch (IOException e) {
       events.onSignalingParametersError("Room IO error: " + e.toString());
     }
@@ -169,19 +162,17 @@ public class RoomParametersFetcher {
   // off the main thread!
   private LinkedList<PeerConnection.IceServer> requestTurnServers(String url)
       throws IOException, JSONException {
-    LinkedList<PeerConnection.IceServer> turnServers =
-        new LinkedList<PeerConnection.IceServer>();
+    LinkedList<PeerConnection.IceServer> turnServers = new LinkedList<PeerConnection.IceServer>();
     Log.d(TAG, "Request TURN from: " + url);
-    HttpURLConnection connection =
-        (HttpURLConnection) new URL(url).openConnection();
+    HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
     connection.setDoOutput(true);
     connection.setRequestProperty("REFERER", "https://appr.tc");
     connection.setConnectTimeout(TURN_HTTP_TIMEOUT_MS);
     connection.setReadTimeout(TURN_HTTP_TIMEOUT_MS);
     int responseCode = connection.getResponseCode();
     if (responseCode != 200) {
-      throw new IOException("Non-200 response when requesting TURN server from "
-          + url + " : " + connection.getHeaderField(null));
+      throw new IOException("Non-200 response when requesting TURN server from " + url + " : "
          + connection.getHeaderField(null));
     }
     InputStream responseStream = connection.getInputStream();
     String response = drainStream(responseStream);
@@ -192,14 +183,11 @@ public class RoomParametersFetcher {
     for (int i = 0; i < iceServers.length(); ++i) {
       JSONObject server = iceServers.getJSONObject(i);
       JSONArray turnUrls = server.getJSONArray("urls");
-      String username =
-          server.has("username") ? server.getString("username") : "";
-      String credential =
-          server.has("credential") ? server.getString("credential") : "";
+      String username = server.has("username") ? server.getString("username") : "";
+      String credential = server.has("credential") ? server.getString("credential") : "";
       for (int j = 0; j < turnUrls.length(); j++) {
         String turnUrl = turnUrls.getString(j);
-        turnServers.add(new PeerConnection.IceServer(turnUrl, username,
-            credential));
+        turnServers.add(new PeerConnection.IceServer(turnUrl, username, credential));
       }
     }
     return turnServers;
@@ -207,17 +195,15 @@ public class RoomParametersFetcher {
   // Return the list of ICE servers described by a WebRTCPeerConnection
   // configuration string.
-  private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(
-      String pcConfig) throws JSONException {
+  private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
+      throws JSONException {
     JSONObject json = new JSONObject(pcConfig);
     JSONArray servers = json.getJSONArray("iceServers");
-    LinkedList<PeerConnection.IceServer> ret =
-        new LinkedList<PeerConnection.IceServer>();
+    LinkedList<PeerConnection.IceServer> ret = new LinkedList<PeerConnection.IceServer>();
     for (int i = 0; i < servers.length(); ++i) {
       JSONObject server = servers.getJSONObject(i);
       String url = server.getString("urls");
-      String credential =
-          server.has("credential") ? server.getString("credential") : "";
+      String credential = server.has("credential") ? server.getString("credential") : "";
       ret.add(new PeerConnection.IceServer(url, "", credential));
     }
     return ret;
@@ -228,5 +214,4 @@ public class RoomParametersFetcher {
     Scanner s = new Scanner(in).useDelimiter("\\A");
     return s.hasNext() ? s.next() : "";
   }
 }

View File

@@ -23,8 +23,7 @@ import org.webrtc.voiceengine.WebRtcAudioUtils;
 /**
  * Settings activity for AppRTC.
  */
-public class SettingsActivity extends Activity
-    implements OnSharedPreferenceChangeListener{
+public class SettingsActivity extends Activity implements OnSharedPreferenceChangeListener {
   private SettingsFragment settingsFragment;
   private String keyprefVideoCall;
   private String keyprefCamera2;
@@ -85,7 +84,8 @@ public class SettingsActivity extends Activity
     // Display the fragment as the main content.
     settingsFragment = new SettingsFragment();
-    getFragmentManager().beginTransaction()
+    getFragmentManager()
+        .beginTransaction()
         .replace(android.R.id.content, settingsFragment)
         .commit();
   }
@@ -127,8 +127,7 @@ public class SettingsActivity extends Activity
     updateSummaryB(sharedPreferences, keyPrefTracing);
     if (!Camera2Enumerator.isSupported(this)) {
-      Preference camera2Preference =
-          settingsFragment.findPreference(keyprefCamera2);
+      Preference camera2Preference = settingsFragment.findPreference(keyprefCamera2);
       camera2Preference.setSummary(getString(R.string.pref_camera2_not_supported));
       camera2Preference.setEnabled(false);
@@ -173,8 +172,8 @@ public class SettingsActivity extends Activity
   }
   @Override
-  public void onSharedPreferenceChanged(SharedPreferences sharedPreferences,
-      String key) {
+  public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
+    // clang-format off
     if (key.equals(keyprefResolution)
         || key.equals(keyprefFps)
         || key.equals(keyprefMaxVideoBitrateType)
@@ -204,6 +203,7 @@ public class SettingsActivity extends Activity
     } else if (key.equals(keyprefSpeakerphone)) {
       updateSummaryList(sharedPreferences, key);
     }
+    // clang-format on
     if (key.equals(keyprefMaxVideoBitrateType)) {
       setVideoBitrateEnable(sharedPreferences);
     }
@@ -218,8 +218,7 @@ public class SettingsActivity extends Activity
     updatedPref.setSummary(sharedPreferences.getString(key, ""));
   }
-  private void updateSummaryBitrate(
-      SharedPreferences sharedPreferences, String key) {
+  private void updateSummaryBitrate(SharedPreferences sharedPreferences, String key) {
     Preference updatedPref = settingsFragment.findPreference(key);
     updatedPref.setSummary(sharedPreferences.getString(key, "") + " kbps");
   }
@@ -240,8 +239,8 @@ public class SettingsActivity extends Activity
     Preference bitratePreferenceValue =
         settingsFragment.findPreference(keyprefMaxVideoBitrateValue);
     String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
-    String bitrateType = sharedPreferences.getString(
-        keyprefMaxVideoBitrateType, bitrateTypeDefault);
+    String bitrateType =
+        sharedPreferences.getString(keyprefMaxVideoBitrateType, bitrateTypeDefault);
     if (bitrateType.equals(bitrateTypeDefault)) {
       bitratePreferenceValue.setEnabled(false);
     } else {
@@ -253,8 +252,8 @@ public class SettingsActivity extends Activity
     Preference bitratePreferenceValue =
         settingsFragment.findPreference(keyprefStartAudioBitrateValue);
     String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
-    String bitrateType = sharedPreferences.getString(
-        keyprefStartAudioBitrateType, bitrateTypeDefault);
+    String bitrateType =
+        sharedPreferences.getString(keyprefStartAudioBitrateType, bitrateTypeDefault);
     if (bitrateType.equals(bitrateTypeDefault)) {
       bitratePreferenceValue.setEnabled(false);
     } else {

View File

@@ -17,7 +17,6 @@ import android.preference.PreferenceFragment;
  * Settings fragment for AppRTC.
  */
 public class SettingsFragment extends PreferenceFragment {
-
   @Override
   public void onCreate(Bundle savedInstanceState) {
     super.onCreate(savedInstanceState);

View File

@@ -116,7 +116,6 @@ public class TCPChannelClient {
     });
   }
-
   /**
    * Base class for server and client sockets. Contains a listening thread that will call
    * eventListener.onTCPMessage on new messages.

View File

@@ -29,8 +29,7 @@ import java.io.StringWriter;
  * Thread.setDefaultUncaughtExceptionHandler() rather than
  * Thread.setUncaughtExceptionHandler(), to apply to background threads as well.
  */
-public class UnhandledExceptionHandler
-    implements Thread.UncaughtExceptionHandler {
+public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandler {
   private static final String TAG = "AppRTCMobileActivity";
   private final Activity activity;
@@ -40,7 +39,8 @@ public class UnhandledExceptionHandler
   public void uncaughtException(Thread unusedThread, final Throwable e) {
     activity.runOnUiThread(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         String title = "Fatal error: " + getTopLevelCauseMessage(e);
         String msg = getRecursiveStackTrace(e);
         TextView errorView = new TextView(activity);
@@ -49,20 +49,18 @@ public class UnhandledExceptionHandler
         ScrollView scrollingContainer = new ScrollView(activity);
         scrollingContainer.addView(errorView);
         Log.e(TAG, title + "\n\n" + msg);
-        DialogInterface.OnClickListener listener =
-            new DialogInterface.OnClickListener() {
-              @Override public void onClick(
-                  DialogInterface dialog, int which) {
+        DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
+          @Override
+          public void onClick(DialogInterface dialog, int which) {
            dialog.dismiss();
            System.exit(1);
          }
        };
-        AlertDialog.Builder builder =
-            new AlertDialog.Builder(activity);
-        builder
-            .setTitle(title)
+        AlertDialog.Builder builder = new AlertDialog.Builder(activity);
+        builder.setTitle(title)
            .setView(scrollingContainer)
-            .setPositiveButton("Exit", listener).show();
+            .setPositiveButton("Exit", listener)
+            .show();
       }
     });
   }
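
Per the class comment above, this handler is meant to be installed as the process-wide default. A minimal sketch of wiring it up from an activity's onCreate(), assuming the UnhandledExceptionHandler(Activity) constructor implied by the activity field (the sketch's class name is invented for the example):

import android.app.Activity;
import android.os.Bundle;

public class CrashReportingActivitySketch extends Activity {
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // The default handler also covers background threads, which is exactly
    // why the class comment recommends it over setUncaughtExceptionHandler().
    Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));
  }
}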

View File

@@ -56,9 +56,7 @@ public class WebSocketChannelClient {
   /**
    * Possible WebSocket connection states.
    */
-  public enum WebSocketConnectionState {
-    NEW, CONNECTED, REGISTERED, CLOSED, ERROR
-  };
+  public enum WebSocketConnectionState { NEW, CONNECTED, REGISTERED, CLOSED, ERROR }
   /**
    * Callback interface for messages delivered on WebSocket.
@@ -179,8 +177,7 @@ public class WebSocketChannelClient {
       sendWSSMessage("DELETE", "");
     }
     // Close WebSocket in CONNECTED or ERROR states only.
-    if (state == WebSocketConnectionState.CONNECTED
-        || state == WebSocketConnectionState.ERROR) {
+    if (state == WebSocketConnectionState.CONNECTED || state == WebSocketConnectionState.ERROR) {
       ws.disconnect();
       state = WebSocketConnectionState.CLOSED;
@@ -219,16 +216,15 @@ public class WebSocketChannelClient {
   private void sendWSSMessage(final String method, final String message) {
     String postUrl = postServerUrl + "/" + roomID + "/" + clientID;
     Log.d(TAG, "WS " + method + " : " + postUrl + " : " + message);
-    AsyncHttpURLConnection httpConnection = new AsyncHttpURLConnection(
-        method, postUrl, message, new AsyncHttpEvents() {
+    AsyncHttpURLConnection httpConnection =
+        new AsyncHttpURLConnection(method, postUrl, message, new AsyncHttpEvents() {
          @Override
          public void onHttpError(String errorMessage) {
            reportError("WS " + method + " error: " + errorMessage);
          }
          @Override
-          public void onHttpComplete(String response) {
-          }
+          public void onHttpComplete(String response) {}
        });
     httpConnection.send();
   }
@@ -237,8 +233,7 @@ public class WebSocketChannelClient {
   // called on a looper thread.
   private void checkIfCalledOnValidThread() {
     if (Thread.currentThread() != handler.getLooper().getThread()) {
-      throw new IllegalStateException(
-          "WebSocket method is not called on valid thread");
+      throw new IllegalStateException("WebSocket method is not called on valid thread");
     }
   }
@@ -260,8 +255,8 @@ public class WebSocketChannelClient {
     @Override
     public void onClose(WebSocketCloseNotification code, String reason) {
-      Log.d(TAG, "WebSocket connection closed. Code: " + code
-          + ". Reason: " + reason + ". State: " + state);
+      Log.d(TAG, "WebSocket connection closed. Code: " + code + ". Reason: " + reason + ". State: "
+          + state);
       synchronized (closeEventLock) {
         closeEvent = true;
         closeEventLock.notify();
@@ -293,12 +288,9 @@ public class WebSocketChannelClient {
     }
     @Override
-    public void onRawTextMessage(byte[] payload) {
-    }
+    public void onRawTextMessage(byte[] payload) {}
     @Override
-    public void onBinaryMessage(byte[] payload) {
-    }
+    public void onBinaryMessage(byte[] payload) {}
   }
 }

View File

@@ -36,19 +36,16 @@ import org.webrtc.SessionDescription;
  * Messages to other party (with local Ice candidates and answer SDP) can
  * be sent after WebSocket connection is established.
  */
-public class WebSocketRTCClient implements AppRTCClient,
-    WebSocketChannelEvents {
+public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents {
   private static final String TAG = "WSRTCClient";
   private static final String ROOM_JOIN = "join";
   private static final String ROOM_MESSAGE = "message";
   private static final String ROOM_LEAVE = "leave";
-  private enum ConnectionState {
-    NEW, CONNECTED, CLOSED, ERROR
-  };
-  private enum MessageType {
-    MESSAGE, LEAVE
-  };
+  private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
+  private enum MessageType { MESSAGE, LEAVE }
   private final Handler handler;
   private boolean initiator;
   private SignalingEvents events;
@@ -101,8 +98,7 @@ public class WebSocketRTCClient implements AppRTCClient,
     RoomParametersFetcherEvents callbacks = new RoomParametersFetcherEvents() {
       @Override
-      public void onSignalingParametersReady(
-          final SignalingParameters params) {
+      public void onSignalingParametersReady(final SignalingParameters params) {
         WebSocketRTCClient.this.handler.post(new Runnable() {
           @Override
           public void run() {
@@ -134,37 +130,32 @@ public class WebSocketRTCClient implements AppRTCClient,
   }
   // Helper functions to get connection, post message and leave message URLs
-  private String getConnectionUrl(
-      RoomConnectionParameters connectionParameters) {
-    return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/"
-        + connectionParameters.roomId;
+  private String getConnectionUrl(RoomConnectionParameters connectionParameters) {
+    return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/" + connectionParameters.roomId;
   }
-  private String getMessageUrl(RoomConnectionParameters connectionParameters,
-      SignalingParameters signalingParameters) {
-    return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/"
-        + connectionParameters.roomId + "/" + signalingParameters.clientId;
+  private String getMessageUrl(
+      RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
+    return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/" + connectionParameters.roomId
+        + "/" + signalingParameters.clientId;
   }
-  private String getLeaveUrl(RoomConnectionParameters connectionParameters,
-      SignalingParameters signalingParameters) {
-    return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/"
-        + connectionParameters.roomId + "/" + signalingParameters.clientId;
+  private String getLeaveUrl(
+      RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
+    return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/" + connectionParameters.roomId + "/"
+        + signalingParameters.clientId;
   }
   // Callback issued when room parameters are extracted. Runs on local
   // looper thread.
-  private void signalingParametersReady(
-      final SignalingParameters signalingParameters) {
+  private void signalingParametersReady(final SignalingParameters signalingParameters) {
     Log.d(TAG, "Room connection completed.");
     if (connectionParameters.loopback
-        && (!signalingParameters.initiator
-            || signalingParameters.offerSdp != null)) {
+        && (!signalingParameters.initiator || signalingParameters.offerSdp != null)) {
       reportError("Loopback room is busy.");
       return;
     }
-    if (!connectionParameters.loopback
-        && !signalingParameters.initiator
+    if (!connectionParameters.loopback && !signalingParameters.initiator
         && signalingParameters.offerSdp == null) {
       Log.w(TAG, "No offer SDP in room response.");
     }
@@ -200,8 +191,7 @@ public class WebSocketRTCClient implements AppRTCClient,
       if (connectionParameters.loopback) {
         // In loopback mode rename this offer to answer and route it back.
         SessionDescription sdpAnswer = new SessionDescription(
-            SessionDescription.Type.fromCanonicalForm("answer"),
-            sdp.description);
+            SessionDescription.Type.fromCanonicalForm("answer"), sdp.description);
         events.onRemoteDescription(sdpAnswer);
       }
     }
@@ -315,8 +305,7 @@ public class WebSocketRTCClient implements AppRTCClient,
     } else if (type.equals("answer")) {
       if (initiator) {
         SessionDescription sdp = new SessionDescription(
-            SessionDescription.Type.fromCanonicalForm(type),
-            json.getString("sdp"));
+            SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
         events.onRemoteDescription(sdp);
       } else {
         reportError("Received answer for call initiator: " + msg);
@@ -324,8 +313,7 @@ public class WebSocketRTCClient implements AppRTCClient,
     } else if (type.equals("offer")) {
       if (!initiator) {
         SessionDescription sdp = new SessionDescription(
-            SessionDescription.Type.fromCanonicalForm(type),
-            json.getString("sdp"));
+            SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
         events.onRemoteDescription(sdp);
       } else {
         reportError("Received offer for call receiver: " + msg);
@@ -389,8 +377,8 @@ public class WebSocketRTCClient implements AppRTCClient,
       logInfo += ". Message: " + message;
     }
     Log.d(TAG, "C->GAE: " + logInfo);
-    AsyncHttpURLConnection httpConnection = new AsyncHttpURLConnection(
-        "POST", url, message, new AsyncHttpEvents() {
+    AsyncHttpURLConnection httpConnection =
+        new AsyncHttpURLConnection("POST", url, message, new AsyncHttpEvents() {
          @Override
          public void onHttpError(String errorMessage) {
            reportError("GAE POST error: " + errorMessage);
@@ -425,8 +413,7 @@ public class WebSocketRTCClient implements AppRTCClient,
   // Converts a JSON candidate to a Java object.
   IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
-    return new IceCandidate(json.getString("id"),
-        json.getInt("label"),
-        json.getString("candidate"));
+    return new IceCandidate(
+        json.getString("id"), json.getInt("label"), json.getString("candidate"));
   }
 }
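
For concreteness, with the ROOM_JOIN, ROOM_MESSAGE and ROOM_LEAVE constants above, the three URL helpers produce paths like the following (the room URL, room id and client id values are invented for the example):

public class RoomUrlSketch {
  public static void main(String[] args) {
    String roomUrl = "https://appr.tc"; // invented example values
    String roomId = "abc123";
    String clientId = "42";
    // Mirrors getConnectionUrl(), getMessageUrl() and getLeaveUrl() above.
    System.out.println(roomUrl + "/join/" + roomId);                      // connection URL
    System.out.println(roomUrl + "/message/" + roomId + "/" + clientId);  // message URL
    System.out.println(roomUrl + "/leave/" + roomId + "/" + clientId);    // leave URL
  }
}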

View File

@@ -17,9 +17,7 @@ import android.util.Log;
  * AppRTCUtils provides helper functions for managing thread safety.
  */
 public final class AppRTCUtils {
-
-  private AppRTCUtils() {
-  }
+  private AppRTCUtils() {}
   /** Helper method which throws an exception when an assertion has failed. */
   public static void assertIsTrue(boolean condition) {
@@ -30,8 +28,8 @@ public final class AppRTCUtils {
   /** Helper method for building a string of thread information.*/
   public static String getThreadInfo() {
-    return "@[name=" + Thread.currentThread().getName()
-        + ", id=" + Thread.currentThread().getId() + "]";
+    return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+        + "]";
   }
   /** Information about the current build, taken from system properties. */

View File

@@ -38,8 +38,7 @@ public class AsyncHttpURLConnection {
     void onHttpComplete(String response);
   }

-  public AsyncHttpURLConnection(String method, String url, String message,
-      AsyncHttpEvents events) {
+  public AsyncHttpURLConnection(String method, String url, String message, AsyncHttpEvents events) {
     this.method = method;
     this.url = url;
     this.message = message;
@@ -61,8 +60,7 @@ public class AsyncHttpURLConnection {
   private void sendHttpMessage() {
     try {
-      HttpURLConnection connection =
-          (HttpURLConnection) new URL(url).openConnection();
+      HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
       byte[] postData = new byte[0];
       if (message != null) {
         postData = message.getBytes("UTF-8");
@@ -96,8 +94,8 @@ public class AsyncHttpURLConnection {
       // Get response.
       int responseCode = connection.getResponseCode();
       if (responseCode != 200) {
-        events.onHttpError("Non-200 response to " + method + " to URL: "
-            + url + " : " + connection.getHeaderField(null));
+        events.onHttpError("Non-200 response to " + method + " to URL: " + url + " : "
+            + connection.getHeaderField(null));
         connection.disconnect();
         return;
       }
@@ -109,8 +107,7 @@ public class AsyncHttpURLConnection {
     } catch (SocketTimeoutException e) {
       events.onHttpError("HTTP " + method + " to " + url + " timeout");
     } catch (IOException e) {
-      events.onHttpError("HTTP " + method + " to " + url + " error: "
-          + e.getMessage());
+      events.onHttpError("HTTP " + method + " to " + url + " error: " + e.getMessage());
     }
   }
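The constructor reflowed above takes the HTTP method, target URL, optional message body, and an events callback. A minimal usage sketch, assuming the class exposes a public send() entry point that drives the private sendHttpMessage() shown here (send() is not visible in this hunk, and the URL and payload are illustrative):

  // Hedged sketch: posts a message and reports the outcome via the callback.
  AsyncHttpURLConnection httpConnection = new AsyncHttpURLConnection(
      "POST", "https://example.com/join", "room=42",
      new AsyncHttpURLConnection.AsyncHttpEvents() {
        @Override
        public void onHttpComplete(String response) {
          System.out.println("Response: " + response);
        }
        @Override
        public void onHttpError(String errorMessage) {
          System.err.println("Error: " + errorMessage);
        }
      });
  httpConnection.send(); // Assumed public entry point.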

View File

@@ -62,6 +62,7 @@ public class DirectRTCClientTest {
   @Test
   public void testValidIpPattern() {
     // Strings that should match the pattern.
+    // clang-format off
     final String[] ipAddresses = new String[] {
       "0.0.0.0",
       "127.0.0.1",
@@ -79,6 +80,7 @@ public class DirectRTCClientTest {
       "[::1]:8888",
       "[2001:0db8:85a3:0000:0000:8a2e:0370:7946]:8888"
     };
+    // clang-format on

     for (String ip : ipAddresses) {
       assertTrue(ip + " didn't match IP_PATTERN even though it should.",
@@ -89,6 +91,7 @@ public class DirectRTCClientTest {
   @Test
   public void testInvalidIpPattern() {
     // Strings that shouldn't match the pattern.
+    // clang-format off
     final String[] invalidIpAddresses = new String[] {
       "Hello, World!",
       "aaaa",
@@ -96,6 +99,7 @@ public class DirectRTCClientTest {
       "[hello world]",
       "hello:world"
     };
+    // clang-format on

     for (String invalidIp : invalidIpAddresses) {
       assertFalse(invalidIp + " matched IP_PATTERN even though it shouldn't.",
@@ -121,8 +125,8 @@ public class DirectRTCClientTest {
     verify(clientEvents, timeout(NETWORK_TIMEOUT))
         .onConnectedToRoom(any(AppRTCClient.SignalingParameters.class));

-    SessionDescription answerSdp
-        = new SessionDescription(SessionDescription.Type.ANSWER, DUMMY_SDP);
+    SessionDescription answerSdp =
+        new SessionDescription(SessionDescription.Type.ANSWER, DUMMY_SDP);
     client.sendAnswerSdp(answerSdp);
     verify(serverEvents, timeout(NETWORK_TIMEOUT))
         .onRemoteDescription(isNotNull(SessionDescription.class));
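The // clang-format off and // clang-format on markers added above fence off hand-formatted literals so the formatter leaves their one-entry-per-line layout alone. A minimal sketch of the idiom; the array contents here are illustrative:

  class FormatterFenceSketch {
    // clang-format off
    // Everything between the markers keeps its manual layout.
    private static final String[] SAMPLE_ADDRESSES = new String[] {
      "0.0.0.0",
      "127.0.0.1",
    };
    // clang-format on
  }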

View File

@@ -52,7 +52,6 @@ public class TCPChannelClientTest {
   private TCPChannelClient server;
   private TCPChannelClient client;
-
   @Before
   public void setUp() {
     ShadowLog.stream = System.out;

View File

@@ -37,8 +37,8 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;

-public class PeerConnectionClientTest extends InstrumentationTestCase
-    implements PeerConnectionEvents {
+public class PeerConnectionClientTest
+    extends InstrumentationTestCase implements PeerConnectionEvents {
   private static final String TAG = "RTCClientTest";
   private static final int ICE_CONNECTION_WAIT_TIMEOUT = 10000;
   private static final int WAIT_TIMEOUT = 7000;
@@ -103,8 +103,8 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
       if (!renderFrameCalled) {
         if (rendererName != null) {
-          Log.d(TAG, rendererName + " render frame: "
-              + frame.rotatedWidth() + " x " + frame.rotatedHeight());
+          Log.d(TAG, rendererName + " render frame: " + frame.rotatedWidth() + " x "
+              + frame.rotatedHeight());
         } else {
           Log.d(TAG, "Render frame: " + frame.rotatedWidth() + " x " + frame.rotatedHeight());
         }
@@ -114,11 +114,9 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
       doneRendering.countDown();
     }

     // This method shouldn't hold any locks or touch member variables since it
     // blocks.
-    public boolean waitForFramesRendered(int timeoutMs)
-        throws InterruptedException {
+    public boolean waitForFramesRendered(int timeoutMs) throws InterruptedException {
       doneRendering.await(timeoutMs, TimeUnit.MILLISECONDS);
       return (doneRendering.getCount() <= 0);
     }
@@ -191,12 +189,10 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
   }

   @Override
-  public void onPeerConnectionStatsReady(StatsReport[] reports) {
-  }
+  public void onPeerConnectionStatsReady(StatsReport[] reports) {}

   // Helper wait functions.
-  private boolean waitForLocalSDP(int timeoutMs)
-      throws InterruptedException {
+  private boolean waitForLocalSDP(int timeoutMs) throws InterruptedException {
     synchronized (localSdpEvent) {
       if (localSdp == null) {
         localSdpEvent.wait(timeoutMs);
@@ -205,8 +201,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     }
   }

-  private boolean waitForIceCandidates(int timeoutMs)
-      throws InterruptedException {
+  private boolean waitForIceCandidates(int timeoutMs) throws InterruptedException {
     synchronized (iceCandidateEvent) {
       if (iceCandidates.size() == 0) {
         iceCandidateEvent.wait(timeoutMs);
@@ -215,8 +210,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     }
   }

-  private boolean waitForIceConnected(int timeoutMs)
-      throws InterruptedException {
+  private boolean waitForIceConnected(int timeoutMs) throws InterruptedException {
     synchronized (iceConnectedEvent) {
       if (!isIceConnected) {
         iceConnectedEvent.wait(timeoutMs);
@@ -229,8 +223,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     }
   }

-  private boolean waitForPeerConnectionClosed(int timeoutMs)
-      throws InterruptedException {
+  private boolean waitForPeerConnectionClosed(int timeoutMs) throws InterruptedException {
     synchronized (closeEvent) {
       if (!isClosed) {
         closeEvent.wait(timeoutMs);
@@ -239,13 +232,12 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     }
   }

-  PeerConnectionClient createPeerConnectionClient(
-      MockRenderer localRenderer, MockRenderer remoteRenderer,
-      PeerConnectionParameters peerConnectionParameters, EglBase.Context eglContext) {
-    List<PeerConnection.IceServer> iceServers =
-        new LinkedList<PeerConnection.IceServer>();
-    SignalingParameters signalingParameters = new SignalingParameters(
-        iceServers, true, // iceServers, initiator.
+  PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer,
+      MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
+      EglBase.Context eglContext) {
+    List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
+    SignalingParameters signalingParameters =
+        new SignalingParameters(iceServers, true, // iceServers, initiator.
         null, null, null, // clientId, wssUrl, wssPostUrl.
         null, null); // offerSdp, iceCandidates.
@@ -263,8 +255,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
   private PeerConnectionParameters createParametersForAudioCall() {
     PeerConnectionParameters peerConnectionParameters =
-        new PeerConnectionParameters(
-            false, /* videoCallEnabled */
+        new PeerConnectionParameters(false, /* videoCallEnabled */
             true, /* loopback */
             false, /* tracing */
             // Video codec parameters.
@@ -281,22 +272,18 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
             "OPUS", /* audioCodec */
             false, /* noAudioProcessing */
             false, /* aecDump */
-            false /* useOpenSLES */,
-            false /* disableBuiltInAEC */,
-            false /* disableBuiltInAGC */,
-            false /* disableBuiltInNS */,
-            false /* enableLevelControl */);
+            false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
+            false /* disableBuiltInNS */, false /* enableLevelControl */);
     return peerConnectionParameters;
   }

   private PeerConnectionParameters createParametersForVideoCall(
       String videoCodec, boolean captureToTexture) {
-    final boolean useCamera2 = captureToTexture
-        && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());
+    final boolean useCamera2 =
+        captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());
     PeerConnectionParameters peerConnectionParameters =
-        new PeerConnectionParameters(
-            true, /* videoCallEnabled */
+        new PeerConnectionParameters(true, /* videoCallEnabled */
             true, /* loopback */
             false, /* tracing */
             // Video codec parameters.
@@ -313,11 +300,8 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
             "OPUS", /* audioCodec */
             false, /* noAudioProcessing */
             false, /* aecDump */
-            false /* useOpenSLES */,
-            false /* disableBuiltInAEC */,
-            false /* disableBuiltInAGC */,
-            false /* disableBuiltInNS */,
-            false /* enableLevelControl */);
+            false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
+            false /* disableBuiltInNS */, false /* enableLevelControl */);
     return peerConnectionParameters;
   }
@@ -338,26 +322,23 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
   }

   @SmallTest
-  public void testSetLocalOfferMakesVideoFlowLocally()
-      throws InterruptedException {
+  public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException {
     Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
-    pcClient = createPeerConnectionClient(
-        localRenderer, new MockRenderer(0, null),
+    pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null),
         createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);

     // Wait for local SDP and ice candidates set events.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
-    assertTrue("ICE candidates were not generated.",
-        waitForIceCandidates(WAIT_TIMEOUT));
+    assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT));

     // Check that local video frames were rendered.
-    assertTrue("Local video frames were not rendered.",
-        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+    assertTrue(
+        "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT));

     pcClient.close();
-    assertTrue("PeerConnection close event was not received.",
-        waitForPeerConnectionClosed(WAIT_TIMEOUT));
+    assertTrue(
+        "PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT));
     Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done.");
   }
@@ -379,8 +360,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
     SessionDescription remoteSdp = new SessionDescription(
-        SessionDescription.Type.fromCanonicalForm("answer"),
-        localSdp.description);
+        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
     pcClient.setRemoteDescription(remoteSdp);

     // Wait for ICE connection.
@@ -492,15 +472,14 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     eglBase = null;

     SessionDescription remoteSdp = new SessionDescription(
-        SessionDescription.Type.fromCanonicalForm("answer"),
-        localSdp.description);
+        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
     pcClient.setRemoteDescription(remoteSdp);

     // Wait for ICE connection.
     assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

     // Check that local and remote video frames were rendered.
-    assertTrue("Local video frames were not rendered.",
-        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+    assertTrue(
+        "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
     assertTrue("Remote video frames were not rendered.",
         remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
@@ -524,7 +503,6 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true);
   }

   // Checks if default front camera can be switched to back camera and then
   // again to front camera.
-
   @SmallTest
@@ -541,8 +519,8 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
     SessionDescription remoteSdp = new SessionDescription(
-        SessionDescription.Type.fromCanonicalForm("answer"),
-        localSdp.description);
+        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
     pcClient.setRemoteDescription(remoteSdp);

     // Wait for ICE connection.
@@ -588,8 +565,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
     SessionDescription remoteSdp = new SessionDescription(
-        SessionDescription.Type.fromCanonicalForm("answer"),
-        localSdp.description);
+        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
     pcClient.setRemoteDescription(remoteSdp);

     // Wait for ICE connection.
@@ -636,8 +612,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
     SessionDescription remoteSdp = new SessionDescription(
-        SessionDescription.Type.fromCanonicalForm("answer"),
-        localSdp.description);
+        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
     pcClient.setRemoteDescription(remoteSdp);

     // Wait for ICE connection.
@@ -671,5 +646,4 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
     assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
     Log.d(TAG, "testCaptureFormatChange done.");
   }
-
 }
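The MockRenderer logic reflowed above is a CountDownLatch idiom: renderFrame() counts down once per expected frame, and waitForFramesRendered() blocks with a timeout and reports whether the count reached zero. A stripped-down sketch of the same idiom; the class and method names here are illustrative:

  import java.util.concurrent.CountDownLatch;
  import java.util.concurrent.TimeUnit;

  class FrameCounterSketch {
    private final CountDownLatch doneRendering;

    FrameCounterSketch(int expectedFrames) {
      doneRendering = new CountDownLatch(expectedFrames);
    }

    // Called once per rendered frame, e.g. from renderFrame().
    void onFrame() {
      doneRendering.countDown();
    }

    // Blocks up to timeoutMs; true if all expected frames arrived in time.
    boolean waitForFramesRendered(int timeoutMs) throws InterruptedException {
      doneRendering.await(timeoutMs, TimeUnit.MILLISECONDS);
      return doneRendering.getCount() <= 0;
    }
  }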

View File

@@ -67,8 +67,7 @@ class WebRtcAudioEffects {
     // Note: we're using isAcousticEchoCancelerEffectAvailable() instead of
     // AcousticEchoCanceler.isAvailable() to avoid the expensive getEffects()
     // OS API call.
-    return WebRtcAudioUtils.runningOnJellyBeanOrHigher()
-        && isAcousticEchoCancelerEffectAvailable();
+    return WebRtcAudioUtils.runningOnJellyBeanOrHigher() && isAcousticEchoCancelerEffectAvailable();
   }

   // Checks if the device implements Automatic Gain Control (AGC).
@@ -77,8 +76,7 @@ class WebRtcAudioEffects {
     // Note: we're using isAutomaticGainControlEffectAvailable() instead of
     // AutomaticGainControl.isAvailable() to avoid the expensive getEffects()
     // OS API call.
-    return WebRtcAudioUtils.runningOnJellyBeanOrHigher()
-        && isAutomaticGainControlEffectAvailable();
+    return WebRtcAudioUtils.runningOnJellyBeanOrHigher() && isAutomaticGainControlEffectAvailable();
   }

   // Checks if the device implements Noise Suppression (NS).
@@ -87,14 +85,12 @@ class WebRtcAudioEffects {
     // Note: we're using isNoiseSuppressorEffectAvailable() instead of
     // NoiseSuppressor.isAvailable() to avoid the expensive getEffects()
     // OS API call.
-    return WebRtcAudioUtils.runningOnJellyBeanOrHigher()
-        && isNoiseSuppressorEffectAvailable();
+    return WebRtcAudioUtils.runningOnJellyBeanOrHigher() && isNoiseSuppressorEffectAvailable();
   }

   // Returns true if the device is blacklisted for HW AEC usage.
   public static boolean isAcousticEchoCancelerBlacklisted() {
-    List<String> blackListedModels =
-        WebRtcAudioUtils.getBlackListedModelsForAecUsage();
+    List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForAecUsage();
     boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
     if (isBlacklisted) {
       Logging.w(TAG, Build.MODEL + " is blacklisted for HW AEC usage!");
@@ -104,8 +100,7 @@ class WebRtcAudioEffects {
   // Returns true if the device is blacklisted for HW AGC usage.
   public static boolean isAutomaticGainControlBlacklisted() {
-    List<String> blackListedModels =
-        WebRtcAudioUtils.getBlackListedModelsForAgcUsage();
+    List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForAgcUsage();
     boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
     if (isBlacklisted) {
       Logging.w(TAG, Build.MODEL + " is blacklisted for HW AGC usage!");
@@ -115,8 +110,7 @@ class WebRtcAudioEffects {
   // Returns true if the device is blacklisted for HW NS usage.
   public static boolean isNoiseSuppressorBlacklisted() {
-    List<String> blackListedModels =
-        WebRtcAudioUtils.getBlackListedModelsForNsUsage();
+    List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForNsUsage();
     boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
     if (isBlacklisted) {
       Logging.w(TAG, Build.MODEL + " is blacklisted for HW NS usage!");
@@ -129,8 +123,8 @@ class WebRtcAudioEffects {
   @TargetApi(18)
   private static boolean isAcousticEchoCancelerExcludedByUUID() {
     for (Descriptor d : getAvailableEffects()) {
-      if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC) &&
-          d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
+      if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC)
+          && d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
         return true;
       }
     }
@@ -142,8 +136,8 @@ class WebRtcAudioEffects {
   @TargetApi(18)
   private static boolean isAutomaticGainControlExcludedByUUID() {
     for (Descriptor d : getAvailableEffects()) {
-      if (d.type.equals(AudioEffect.EFFECT_TYPE_AGC) &&
-          d.uuid.equals(AOSP_AUTOMATIC_GAIN_CONTROL)) {
+      if (d.type.equals(AudioEffect.EFFECT_TYPE_AGC)
+          && d.uuid.equals(AOSP_AUTOMATIC_GAIN_CONTROL)) {
         return true;
       }
     }
@@ -155,8 +149,7 @@ class WebRtcAudioEffects {
   @TargetApi(18)
   private static boolean isNoiseSuppressorExcludedByUUID() {
     for (Descriptor d : getAvailableEffects()) {
-      if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) &&
-          d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
+      if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
         return true;
       }
     }
@@ -184,36 +177,28 @@ class WebRtcAudioEffects {
   // Returns true if all conditions for supporting the HW AEC are fulfilled.
   // It will not be possible to enable the HW AEC if this method returns false.
   public static boolean canUseAcousticEchoCanceler() {
-    boolean canUseAcousticEchoCanceler =
-        isAcousticEchoCancelerSupported()
+    boolean canUseAcousticEchoCanceler = isAcousticEchoCancelerSupported()
         && !WebRtcAudioUtils.useWebRtcBasedAcousticEchoCanceler()
-        && !isAcousticEchoCancelerBlacklisted()
-        && !isAcousticEchoCancelerExcludedByUUID();
-    Logging.d(TAG, "canUseAcousticEchoCanceler: "
-        + canUseAcousticEchoCanceler);
+        && !isAcousticEchoCancelerBlacklisted() && !isAcousticEchoCancelerExcludedByUUID();
+    Logging.d(TAG, "canUseAcousticEchoCanceler: " + canUseAcousticEchoCanceler);
     return canUseAcousticEchoCanceler;
   }

   // Returns true if all conditions for supporting the HW AGC are fulfilled.
   // It will not be possible to enable the HW AGC if this method returns false.
   public static boolean canUseAutomaticGainControl() {
-    boolean canUseAutomaticGainControl =
-        isAutomaticGainControlSupported()
+    boolean canUseAutomaticGainControl = isAutomaticGainControlSupported()
         && !WebRtcAudioUtils.useWebRtcBasedAutomaticGainControl()
-        && !isAutomaticGainControlBlacklisted()
-        && !isAutomaticGainControlExcludedByUUID();
-    Logging.d(TAG, "canUseAutomaticGainControl: "
-        + canUseAutomaticGainControl);
+        && !isAutomaticGainControlBlacklisted() && !isAutomaticGainControlExcludedByUUID();
+    Logging.d(TAG, "canUseAutomaticGainControl: " + canUseAutomaticGainControl);
     return canUseAutomaticGainControl;
   }

   // Returns true if all conditions for supporting the HW NS are fulfilled.
   // It will not be possible to enable the HW NS if this method returns false.
   public static boolean canUseNoiseSuppressor() {
-    boolean canUseNoiseSuppressor =
-        isNoiseSuppressorSupported()
-        && !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor()
-        && !isNoiseSuppressorBlacklisted()
+    boolean canUseNoiseSuppressor = isNoiseSuppressorSupported()
+        && !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor() && !isNoiseSuppressorBlacklisted()
         && !isNoiseSuppressorExcludedByUUID();
     Logging.d(TAG, "canUseNoiseSuppressor: " + canUseNoiseSuppressor);
     return canUseNoiseSuppressor;
@@ -316,8 +301,7 @@ class WebRtcAudioEffects {
       if (aec.setEnabled(enable) != AudioEffect.SUCCESS) {
         Logging.e(TAG, "Failed to set the AcousticEchoCanceler state");
       }
-      Logging.d(TAG, "AcousticEchoCanceler: was "
-          + (enabled ? "enabled" : "disabled")
+      Logging.d(TAG, "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled")
           + ", enable: " + enable + ", is now: "
           + (aec.getEnabled() ? "enabled" : "disabled"));
     } else {
@@ -335,8 +319,7 @@ class WebRtcAudioEffects {
       if (agc.setEnabled(enable) != AudioEffect.SUCCESS) {
         Logging.e(TAG, "Failed to set the AutomaticGainControl state");
       }
-      Logging.d(TAG, "AutomaticGainControl: was "
-          + (enabled ? "enabled" : "disabled")
+      Logging.d(TAG, "AutomaticGainControl: was " + (enabled ? "enabled" : "disabled")
           + ", enable: " + enable + ", is now: "
           + (agc.getEnabled() ? "enabled" : "disabled"));
     } else {
@@ -354,10 +337,8 @@ class WebRtcAudioEffects {
       if (ns.setEnabled(enable) != AudioEffect.SUCCESS) {
         Logging.e(TAG, "Failed to set the NoiseSuppressor state");
       }
-      Logging.d(TAG, "NoiseSuppressor: was "
-          + (enabled ? "enabled" : "disabled")
-          + ", enable: " + enable + ", is now: "
-          + (ns.getEnabled() ? "enabled" : "disabled"));
+      Logging.d(TAG, "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: "
+          + enable + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled"));
     } else {
       Logging.e(TAG, "Failed to create the NoiseSuppressor instance");
     }
@@ -395,12 +376,9 @@ class WebRtcAudioEffects {
     if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
       return false;
-    return (AudioEffect.EFFECT_TYPE_AEC.equals(type)
-        && isAcousticEchoCancelerSupported())
-        || (AudioEffect.EFFECT_TYPE_AGC.equals(type)
-        && isAutomaticGainControlSupported())
-        || (AudioEffect.EFFECT_TYPE_NS.equals(type)
-        && isNoiseSuppressorSupported());
+    return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
+        || (AudioEffect.EFFECT_TYPE_AGC.equals(type) && isAutomaticGainControlSupported())
+        || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
   }

   // Helper method which throws an exception when an assertion has failed.
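The ExcludedByUUID() helpers reflowed above skip the stock AOSP effect implementations, which satisfy the availability API without doing useful processing. The underlying platform call is AudioEffect.queryEffects() (the type constants used here require API 18). A standalone sketch of the same scan; the UUID value shown is assumed to be the AOSP AEC implementation UUID, which in WebRTC lives in the AOSP_ACOUSTIC_ECHO_CANCELER constant:

  import android.media.audiofx.AudioEffect;
  import android.media.audiofx.AudioEffect.Descriptor;
  import java.util.UUID;

  class EffectUuidScanSketch {
    // Assumed AOSP AEC implementation UUID.
    private static final UUID AOSP_AEC_UUID =
        UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b");

    // True if the platform AEC on this device is the stock AOSP implementation.
    static boolean isAospAec() {
      for (Descriptor d : AudioEffect.queryEffects()) {
        if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC) && d.uuid.equals(AOSP_AEC_UUID)) {
          return true;
        }
      }
      return false;
    }
  }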

View File

@@ -45,8 +45,7 @@ public class WebRtcAudioManager {
   // specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS.
   // Allows an app to take control over which devices to exlude from using
   // the OpenSL ES audio output path
-  public static synchronized void setBlacklistDeviceForOpenSLESUsage(
-      boolean enable) {
+  public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) {
     blacklistDeviceForOpenSLESUsageIsOverridden = true;
     blacklistDeviceForOpenSLESUsage = enable;
   }
@@ -62,10 +61,7 @@ public class WebRtcAudioManager {
   // List of possible audio modes.
   private static final String[] AUDIO_MODES = new String[] {
-      "MODE_NORMAL",
-      "MODE_RINGTONE",
-      "MODE_IN_CALL",
-      "MODE_IN_COMMUNICATION",
+      "MODE_NORMAL", "MODE_RINGTONE", "MODE_IN_CALL", "MODE_IN_COMMUNICATION",
   };

   // Private utility class that periodically checks and logs the volume level
@@ -85,8 +81,7 @@ public class WebRtcAudioManager {
     public void start() {
       timer = new Timer(THREAD_NAME);
-      timer.schedule(new LogVolumeTask(
-          audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
+      timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
           audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
           0, TIMER_PERIOD_IN_SECONDS * 1000);
     }
@@ -104,12 +99,12 @@ public class WebRtcAudioManager {
       final int mode = audioManager.getMode();
       if (mode == AudioManager.MODE_RINGTONE) {
         Logging.d(TAG, "STREAM_RING stream volume: "
-            + audioManager.getStreamVolume(AudioManager.STREAM_RING)
-            + " (max=" + maxRingVolume + ")");
+            + audioManager.getStreamVolume(AudioManager.STREAM_RING) + " (max="
+            + maxRingVolume + ")");
       } else if (mode == AudioManager.MODE_IN_COMMUNICATION) {
         Logging.d(TAG, "VOICE_CALL stream volume: "
-            + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL)
-            + " (max=" + maxVoiceCallVolume + ")");
+            + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL) + " (max="
+            + maxVoiceCallVolume + ")");
       }
     }
   }
@@ -147,8 +142,7 @@ public class WebRtcAudioManager {
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
     this.context = context;
     this.nativeAudioManager = nativeAudioManager;
-    audioManager = (AudioManager) context.getSystemService(
-        Context.AUDIO_SERVICE);
+    audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
     if (DEBUG) {
       WebRtcAudioUtils.logDeviceInfo(TAG);
     }
@@ -183,9 +177,9 @@ public class WebRtcAudioManager {
   }

   private boolean isDeviceBlacklistedForOpenSLESUsage() {
-    boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden ?
-        blacklistDeviceForOpenSLESUsage :
-        WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
+    boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
+        ? blacklistDeviceForOpenSLESUsage
+        : WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
     if (blacklisted) {
       Logging.e(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!");
     }
@@ -203,24 +197,21 @@ public class WebRtcAudioManager {
     lowLatencyOutput = isLowLatencyOutputSupported();
     lowLatencyInput = isLowLatencyInputSupported();
     proAudio = isProAudioSupported();
-    outputBufferSize = lowLatencyOutput ?
-        getLowLatencyOutputFramesPerBuffer() :
-        getMinOutputFrameSize(sampleRate, channels);
+    outputBufferSize = lowLatencyOutput ? getLowLatencyOutputFramesPerBuffer()
+                                        : getMinOutputFrameSize(sampleRate, channels);
     inputBufferSize = lowLatencyInput ? getLowLatencyInputFramesPerBuffer()
                                       : getMinInputFrameSize(sampleRate, channels);
   }

   // Gets the current earpiece state.
   private boolean hasEarpiece() {
-    return context.getPackageManager().hasSystemFeature(
-        PackageManager.FEATURE_TELEPHONY);
+    return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
   }

   // Returns true if low-latency audio output is supported.
   private boolean isLowLatencyOutputSupported() {
-    return isOpenSLESSupported() &&
-        context.getPackageManager().hasSystemFeature(
-            PackageManager.FEATURE_AUDIO_LOW_LATENCY);
+    return isOpenSLESSupported()
+        && context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
   }

   // Returns true if low-latency audio input is supported.
@@ -231,16 +222,14 @@ public class WebRtcAudioManager {
     // as well. The NDK doc states that: "As of API level 21, lower latency
     // audio input is supported on select devices. To take advantage of this
     // feature, first confirm that lower latency output is available".
-    return WebRtcAudioUtils.runningOnLollipopOrHigher() &&
-        isLowLatencyOutputSupported();
+    return WebRtcAudioUtils.runningOnLollipopOrHigher() && isLowLatencyOutputSupported();
   }

   // Returns true if the device has professional audio level of functionality
   // and therefore supports the lowest possible round-trip latency.
   private boolean isProAudioSupported() {
     return WebRtcAudioUtils.runningOnMarshmallowOrHigher()
-        && context.getPackageManager().hasSystemFeature(
-            PackageManager.FEATURE_AUDIO_PRO);
+        && context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_PRO);
   }

   // Returns the native output sample rate for this device's output stream.
@@ -254,8 +243,8 @@ public class WebRtcAudioManager {
     // Default can be overriden by WebRtcAudioUtils.setDefaultSampleRateHz().
     // If so, use that value and return here.
     if (WebRtcAudioUtils.isDefaultSampleRateOverridden()) {
-      Logging.d(TAG, "Default sample rate is overriden to " +
-          WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
+      Logging.d(TAG, "Default sample rate is overriden to "
+          + WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
       return WebRtcAudioUtils.getDefaultSampleRateHz();
     }
     // No overrides available. Deliver best possible estimate based on default
@@ -272,10 +261,8 @@ public class WebRtcAudioManager {
   @TargetApi(17)
   private int getSampleRateOnJellyBeanMR10OrHigher() {
-    String sampleRateString = audioManager.getProperty(
-        AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
-    return (sampleRateString == null)
-        ? WebRtcAudioUtils.getDefaultSampleRateHz()
-        : Integer.parseInt(sampleRateString);
+    String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+    return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz()
+                                      : Integer.parseInt(sampleRateString);
   }
@@ -286,10 +273,9 @@ public class WebRtcAudioManager {
     if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
       return DEFAULT_FRAME_PER_BUFFER;
     }
-    String framesPerBuffer = audioManager.getProperty(
-        AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
-    return framesPerBuffer == null ?
-        DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
+    String framesPerBuffer =
+        audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+    return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
   }

   // Returns true if the device supports an audio effect (AEC, AGC or NS).
@@ -322,8 +308,8 @@ public class WebRtcAudioManager {
       return -1;
     }
     return AudioTrack.getMinBufferSize(
-        sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT) /
-        bytesPerFrame;
+        sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+        / bytesPerFrame;
   }

   // Returns the native input buffer size for input streams.
@@ -338,9 +324,9 @@ public class WebRtcAudioManager {
   private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
     final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
     assertTrue(numChannels == CHANNELS);
-    return AudioRecord.getMinBufferSize(sampleRateInHz,
-        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) /
-        bytesPerFrame;
+    return AudioRecord.getMinBufferSize(
+        sampleRateInHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)
+        / bytesPerFrame;
   }

   // Returns true if OpenSL ES audio is supported.
@@ -357,7 +343,6 @@ public class WebRtcAudioManager {
   }

   private native void nativeCacheAudioParameters(int sampleRate, int channels, boolean hardwareAEC,
-      boolean hardwareAGC, boolean hardwareNS, boolean lowLatencyOutput,
-      boolean lowLatencyInput, boolean proAudio, int outputBufferSize, int inputBufferSize,
-      long nativeAudioManager);
+      boolean hardwareAGC, boolean hardwareNS, boolean lowLatencyOutput, boolean lowLatencyInput,
+      boolean proAudio, int outputBufferSize, int inputBufferSize, long nativeAudioManager);
 }
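The getMin*FrameSize() helpers reflowed above convert the platform's minimum buffer size, which getMinBufferSize() reports in bytes, into frames by dividing by bytes per frame (channels x 2 bytes for 16-bit PCM). A standalone sketch of the same arithmetic; the mono/stereo channel mapping here is an assumption:

  import android.media.AudioFormat;
  import android.media.AudioTrack;

  class MinFrameSizeSketch {
    private static final int BITS_PER_SAMPLE = 16; // 16-bit PCM, as in the diff.

    // Minimum AudioTrack buffer size expressed in frames rather than bytes.
    static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
      final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
      final int channelConfig =
          numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
      return AudioTrack.getMinBufferSize(
          sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
          / bytesPerFrame;
    }
  }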

View File

@@ -77,8 +77,7 @@ public class WebRtcAudioRecord {
     public void run() {
       Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
       Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo());
-      assertTrue(audioRecord.getRecordingState()
-          == AudioRecord.RECORDSTATE_RECORDING);
+      assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);

       long lastTime = System.nanoTime();
       while (keepAlive) {
@@ -97,8 +96,7 @@ public class WebRtcAudioRecord {
         }
         if (DEBUG) {
           long nowTime = System.nanoTime();
-          long durationInMs =
-              TimeUnit.NANOSECONDS.toMillis((nowTime - lastTime));
+          long durationInMs = TimeUnit.NANOSECONDS.toMillis((nowTime - lastTime));
           lastTime = nowTime;
           Logging.d(TAG, "bytesRead[" + durationInMs + "] " + bytesRead);
         }
@@ -159,10 +157,8 @@ public class WebRtcAudioRecord {
   }

   private int initRecording(int sampleRate, int channels) {
-    Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" +
-        channels + ")");
-    if (!WebRtcAudioUtils.hasPermission(
-        context, android.Manifest.permission.RECORD_AUDIO)) {
+    Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+    if (!WebRtcAudioUtils.hasPermission(context, android.Manifest.permission.RECORD_AUDIO)) {
       Logging.e(TAG, "RECORD_AUDIO permission is missing");
       return -1;
     }
@@ -184,11 +180,8 @@ public class WebRtcAudioRecord {
     // an AudioRecord object, in byte units.
     // Note that this size doesn't guarantee a smooth recording under load.
     int minBufferSize = AudioRecord.getMinBufferSize(
-        sampleRate,
-        AudioFormat.CHANNEL_IN_MONO,
-        AudioFormat.ENCODING_PCM_16BIT);
-    if (minBufferSize == AudioRecord.ERROR
-        || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
+        sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
+    if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
       Logging.e(TAG, "AudioRecord.getMinBufferSize failed: " + minBufferSize);
       return -1;
     }
@@ -197,21 +190,16 @@ public class WebRtcAudioRecord {
     // Use a larger buffer size than the minimum required when creating the
     // AudioRecord instance to ensure smooth recording under load. It has been
     // verified that it does not increase the actual recording latency.
-    int bufferSizeInBytes =
-        Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
+    int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
     Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes);
     try {
-      audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION,
-          sampleRate,
-          AudioFormat.CHANNEL_IN_MONO,
-          AudioFormat.ENCODING_PCM_16BIT,
-          bufferSizeInBytes);
+      audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION, sampleRate,
+          AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
     } catch (IllegalArgumentException e) {
       Logging.e(TAG, e.getMessage());
       return -1;
     }
-    if (audioRecord == null ||
-        audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
+    if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
       Logging.e(TAG, "Failed to create a new AudioRecord instance");
       return -1;
     }
@@ -261,8 +249,7 @@ public class WebRtcAudioRecord {
     Logging.d(TAG, "stopRecording");
     assertTrue(audioThread != null);
     audioThread.stopThread();
-    if (!ThreadUtils.joinUninterruptibly(
-        audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
+    if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
       Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
     }
     audioThread = null;
@@ -281,8 +268,7 @@ public class WebRtcAudioRecord {
     }
   }

-  private native void nativeCacheDirectBufferAddress(
-      ByteBuffer byteBuffer, long nativeAudioRecord);
+  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);

   private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord);
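initRecording() above sizes the platform buffer as the larger of BUFFER_SIZE_FACTOR x minBufferSize and one native delivery chunk, so recording stays smooth under load without increasing latency. A self-contained sketch of that setup; the BUFFER_SIZE_FACTOR value of 2 is an assumption:

  import android.media.AudioFormat;
  import android.media.AudioRecord;
  import android.media.MediaRecorder.AudioSource;

  class RecorderSetupSketch {
    private static final int BUFFER_SIZE_FACTOR = 2; // Assumed safety factor.

    // Creates a mono 16-bit PCM recorder, or returns null on bad parameters.
    static AudioRecord createRecorder(int sampleRate, int nativeChunkBytes) {
      int minBufferSize = AudioRecord.getMinBufferSize(
          sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
      if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
        return null;
      }
      int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, nativeChunkBytes);
      try {
        return new AudioRecord(AudioSource.VOICE_COMMUNICATION, sampleRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
      } catch (IllegalArgumentException e) {
        return null;
      }
    }
  }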

View File

@@ -155,19 +155,16 @@ public class WebRtcAudioTrack {
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
     this.context = context;
     this.nativeAudioTrack = nativeAudioTrack;
-    audioManager = (AudioManager) context.getSystemService(
-        Context.AUDIO_SERVICE);
+    audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
     if (DEBUG) {
       WebRtcAudioUtils.logDeviceInfo(TAG);
     }
   }

   private boolean initPlayout(int sampleRate, int channels) {
-    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels="
-        + channels + ")");
+    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
     final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
-    byteBuffer = byteBuffer.allocateDirect(
-        bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
+    byteBuffer = byteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
     Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
     emptyBytes = new byte[byteBuffer.capacity()];
     // Rather than passing the ByteBuffer with every callback (requiring
@@ -180,9 +177,7 @@ public class WebRtcAudioTrack {
     // Note that this size doesn't guarantee a smooth playback under load.
     // TODO(henrika): should we extend the buffer size to avoid glitches?
     final int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
-        sampleRate,
-        AudioFormat.CHANNEL_OUT_MONO,
-        AudioFormat.ENCODING_PCM_16BIT);
+        sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
     Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
     // For the streaming mode, data must be written to the audio sink in
     // chunks of size (given by byteBuffer.capacity()) less than or equal
@@ -204,12 +199,9 @@ public class WebRtcAudioTrack {
       // Create an AudioTrack object and initialize its associated audio buffer.
       // The size of this buffer determines how long an AudioTrack can play
       // before running out of data.
-      audioTrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL,
-          sampleRate,
-          AudioFormat.CHANNEL_OUT_MONO,
-          AudioFormat.ENCODING_PCM_16BIT,
-          minBufferSizeInBytes,
-          AudioTrack.MODE_STREAM);
+      audioTrack =
+          new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
+              AudioFormat.ENCODING_PCM_16BIT, minBufferSizeInBytes, AudioTrack.MODE_STREAM);
     } catch (IllegalArgumentException e) {
       Logging.d(TAG, e.getMessage());
       return false;
@@ -290,8 +282,7 @@ public class WebRtcAudioTrack {
     }
   }

-  private native void nativeCacheDirectBufferAddress(
-      ByteBuffer byteBuffer, long nativeAudioRecord);
+  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);

   private native void nativeGetPlayoutData(int bytes, long nativeAudioRecord);
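initPlayout() above allocates a direct ByteBuffer holding exactly one callback's worth of PCM: bytesPerFrame x (sampleRate / BUFFERS_PER_SECOND). A standalone sketch of the arithmetic; the BUFFERS_PER_SECOND value of 100 (10 ms chunks) is an assumption:

  import java.nio.ByteBuffer;

  class PlayoutBufferSketch {
    private static final int BITS_PER_SAMPLE = 16;     // 16-bit PCM.
    private static final int BUFFERS_PER_SECOND = 100; // Assumed: one buffer per 10 ms.

    // One callback's worth of audio, e.g. 48000 Hz mono -> 480 frames = 960 bytes.
    static ByteBuffer allocatePlayoutBuffer(int sampleRate, int channels) {
      final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
      return ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
    }
  }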

View File

@@ -40,12 +40,10 @@ public final class WebRtcAudioUtils {
     "MotoG3", // Moto G (3rd Generation)
   };
   private static final String[] BLACKLISTED_AGC_MODELS = new String[] {
-      "Nexus 10",
-      "Nexus 9",
+      "Nexus 10", "Nexus 9",
   };
   private static final String[] BLACKLISTED_NS_MODELS = new String[] {
-      "Nexus 10",
-      "Nexus 9",
+      "Nexus 10", "Nexus 9",
       "ONE A2005", // OnePlus 2
   };
@@ -63,16 +61,13 @@ public final class WebRtcAudioUtils {
   // Call these methods if any hardware based effect shall be replaced by a
   // software based version provided by the WebRTC stack instead.
-  public static synchronized void setWebRtcBasedAcousticEchoCanceler(
-      boolean enable) {
+  public static synchronized void setWebRtcBasedAcousticEchoCanceler(boolean enable) {
     useWebRtcBasedAcousticEchoCanceler = enable;
   }

-  public static synchronized void setWebRtcBasedAutomaticGainControl(
-      boolean enable) {
+  public static synchronized void setWebRtcBasedAutomaticGainControl(boolean enable) {
     useWebRtcBasedAutomaticGainControl = enable;
   }

-  public static synchronized void setWebRtcBasedNoiseSuppressor(
-      boolean enable) {
+  public static synchronized void setWebRtcBasedNoiseSuppressor(boolean enable) {
     useWebRtcBasedNoiseSuppressor = enable;
   }
@@ -171,20 +166,18 @@ public final class WebRtcAudioUtils {
   // Helper method for building a string of thread information.
   public static String getThreadInfo() {
-    return "@[name=" + Thread.currentThread().getName()
-        + ", id=" + Thread.currentThread().getId() + "]";
+    return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+        + "]";
   }

   // Returns true if we're running on emulator.
   public static boolean runningOnEmulator() {
-    return Build.HARDWARE.equals("goldfish") &&
-        Build.BRAND.startsWith("generic_");
+    return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
   }

   // Returns true if the device is blacklisted for OpenSL ES usage.
   public static boolean deviceIsBlacklistedForOpenSLESUsage() {
-    List<String> blackListedModels =
-        Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
+    List<String> blackListedModels = Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
     return blackListedModels.contains(Build.MODEL);
   }
@@ -203,9 +196,7 @@ public final class WebRtcAudioUtils {
   // Checks if the process has as specified permission or not.
   public static boolean hasPermission(Context context, String permission) {
-    return context.checkPermission(
-        permission,
-        Process.myPid(),
-        Process.myUid()) == PackageManager.PERMISSION_GRANTED;
+    return context.checkPermission(permission, Process.myPid(), Process.myUid())
+        == PackageManager.PERMISSION_GRANTED;
   }
 }
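hasPermission() above checks the current process against a manifest permission with Context.checkPermission(permission, pid, uid). A minimal sketch specialized to RECORD_AUDIO; the wrapper class and method names are illustrative:

  import android.content.Context;
  import android.content.pm.PackageManager;
  import android.os.Process;

  class PermissionCheckSketch {
    // True if this process holds RECORD_AUDIO; same idiom as hasPermission() above.
    static boolean canRecordAudio(Context context) {
      return context.checkPermission(
          android.Manifest.permission.RECORD_AUDIO, Process.myPid(), Process.myUid())
          == PackageManager.PERMISSION_GRANTED;
    }
  }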