Introduces Android API level linting, fixes all current API lint errors.
This CL attempts to annotate accesses to >16 API levels using scopes as small as
possible. The @TargetApi annotations mean "yes, I know I'm accessing a higher API
and I take responsibility for gating the call on the Android API level". The
Encoder/Decoder classes are annotated on the whole class, but they are only
accessed through JNI; otherwise we should annotate at method level, and preferably
on private methods.

This patch also fixes some compiler-level deprecation warnings
(i.e. -Xlint:deprecation), but probably not all of them.

BUG=webrtc:5063
R=henrika@webrtc.org, kjellander@webrtc.org, magjed@webrtc.org

Review URL: https://codereview.webrtc.org/1412673008 .

Cr-Commit-Position: refs/heads/master@{#10624}
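The gating pattern the message describes shows up repeatedly in the hunks below (for example in WebRtcAudioTrack and WebRtcAudioManager). As a minimal, self-contained sketch of the idea, assuming nothing beyond the public Android SDK (the class and helper names here are illustrative, not taken from the patch): the call site checks the runtime API level, and only a small @TargetApi-annotated private helper touches the newer API, which satisfies the NewApi lint check without suppressing it class-wide.

import android.annotation.TargetApi;
import android.media.AudioManager;
import android.os.Build;

// Illustrative example only; not part of this CL.
class VolumePolicyExample {
  private final AudioManager audioManager;

  VolumePolicyExample(AudioManager audioManager) {
    this.audioManager = audioManager;
  }

  // Safe to call on any API level: the API 21 call is gated at runtime here and
  // confined to the annotated helper below.
  boolean hasFixedVolumePolicy() {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
      return false;
    }
    return isVolumeFixedOnLollipop();
  }

  @TargetApi(21)
  private boolean isVolumeFixedOnLollipop() {
    // AudioManager.isVolumeFixed() was added in API level 21.
    return audioManager.isVolumeFixed();
  }
}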
Camera2Enumerator.java:

@@ -27,7 +27,9 @@
 
 package org.webrtc;
 
+import android.annotation.TargetApi;
 import android.content.Context;
+
 import android.graphics.ImageFormat;
 import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.CameraManager;
@@ -45,6 +47,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+@TargetApi(21)
 public class Camera2Enumerator implements CameraEnumerationAndroid.Enumerator {
   private final static String TAG = "Camera2Enumerator";
   private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
CameraEnumerationAndroid.java:

@@ -29,7 +29,6 @@ package org.webrtc;
 
 import static java.lang.Math.abs;
 import static java.lang.Math.ceil;
-import android.hardware.Camera;
 import android.graphics.ImageFormat;
 
 import org.json.JSONArray;
@@ -127,8 +126,8 @@ public class CameraEnumerationAndroid {
 
   // Returns device names that can be used to create a new VideoCapturerAndroid.
   public static String[] getDeviceNames() {
-    String[] names = new String[Camera.getNumberOfCameras()];
-    for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+    String[] names = new String[android.hardware.Camera.getNumberOfCameras()];
+    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
       names[i] = getDeviceName(i);
     }
     return names;
@@ -136,22 +135,22 @@ public class CameraEnumerationAndroid {
 
   // Returns number of cameras on device.
   public static int getDeviceCount() {
-    return Camera.getNumberOfCameras();
+    return android.hardware.Camera.getNumberOfCameras();
   }
 
   // Returns the name of the camera with camera index. Returns null if the
   // camera can not be used.
   public static String getDeviceName(int index) {
-    Camera.CameraInfo info = new Camera.CameraInfo();
+    android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
     try {
-      Camera.getCameraInfo(index, info);
+      android.hardware.Camera.getCameraInfo(index, info);
     } catch (Exception e) {
       Logging.e(TAG, "getCameraInfo failed on index " + index,e);
       return null;
     }
 
     String facing =
-        (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+        (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
     return "Camera " + index + ", Facing " + facing
         + ", Orientation " + info.orientation;
   }
@@ -159,13 +158,13 @@ public class CameraEnumerationAndroid {
   // Returns the name of the front facing camera. Returns null if the
   // camera can not be used or does not exist.
   public static String getNameOfFrontFacingDevice() {
-    return getNameOfDevice(Camera.CameraInfo.CAMERA_FACING_FRONT);
+    return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
   }
 
   // Returns the name of the back facing camera. Returns null if the
   // camera can not be used or does not exist.
   public static String getNameOfBackFacingDevice() {
-    return getNameOfDevice(Camera.CameraInfo.CAMERA_FACING_BACK);
+    return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
   }
 
   public static String getSupportedFormatsAsJson(int id) throws JSONException {
@@ -194,7 +193,8 @@ public class CameraEnumerationAndroid {
     }
   }
 
-  public static int[] getFramerateRange(Camera.Parameters parameters, final int framerate) {
+  public static int[] getFramerateRange(android.hardware.Camera.Parameters parameters,
+      final int framerate) {
     List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
     if (listFpsRange.isEmpty()) {
       Logging.w(TAG, "No supported preview fps range");
@@ -203,27 +203,28 @@ public class CameraEnumerationAndroid {
     return Collections.min(listFpsRange,
         new ClosestComparator<int[]>() {
           @Override int diff(int[] range) {
-            return abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX])
-                + abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+            return abs(framerate - range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX])
+                + abs(framerate - range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
           }
         });
   }
 
-  public static Camera.Size getClosestSupportedSize(
-      List<Camera.Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+  public static android.hardware.Camera.Size getClosestSupportedSize(
+      List<android.hardware.Camera.Size> supportedSizes, final int requestedWidth,
+      final int requestedHeight) {
     return Collections.min(supportedSizes,
-        new ClosestComparator<Camera.Size>() {
-          @Override int diff(Camera.Size size) {
+        new ClosestComparator<android.hardware.Camera.Size>() {
+          @Override int diff(android.hardware.Camera.Size size) {
             return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
           }
         });
   }
 
   private static String getNameOfDevice(int facing) {
-    final Camera.CameraInfo info = new Camera.CameraInfo();
-    for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+    final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
       try {
-        Camera.getCameraInfo(i, info);
+        android.hardware.Camera.getCameraInfo(i, info);
         if (info.facing == facing) {
           return getDeviceName(i);
         }
CameraEnumerator.java:

@@ -27,7 +27,6 @@
 
 package org.webrtc;
 
-import android.hardware.Camera;
 import android.os.SystemClock;
 
 import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@@ -60,11 +59,11 @@ public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
   private List<CaptureFormat> enumerateFormats(int cameraId) {
     Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
     final long startTimeMs = SystemClock.elapsedRealtime();
-    final Camera.Parameters parameters;
-    Camera camera = null;
+    final android.hardware.Camera.Parameters parameters;
+    android.hardware.Camera camera = null;
     try {
       Logging.d(TAG, "Opening camera with index " + cameraId);
-      camera = Camera.open(cameraId);
+      camera = android.hardware.Camera.open(cameraId);
       parameters = camera.getParameters();
     } catch (RuntimeException e) {
       Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
@@ -84,10 +83,10 @@ public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
         // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
         // corresponding to the highest fps.
         final int[] range = listFpsRange.get(listFpsRange.size() - 1);
-        minFps = range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
-        maxFps = range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+        minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+        maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
       }
-      for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
+      for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
         formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
       }
     } catch (Exception e) {
VideoCapturerAndroid.java:

@@ -29,8 +29,6 @@ package org.webrtc;
 
 import android.content.Context;
 import android.graphics.SurfaceTexture;
-import android.hardware.Camera;
-import android.hardware.Camera.PreviewCallback;
 import android.os.Handler;
 import android.os.HandlerThread;
 import android.os.SystemClock;
@@ -68,20 +66,21 @@ import javax.microedition.khronos.egl.EGL10;
 // camera thread. The internal *OnCameraThread() methods must check |camera| for null to check if
 // the camera has been stopped.
 @SuppressWarnings("deprecation")
-public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
+public class VideoCapturerAndroid extends VideoCapturer implements
+    android.hardware.Camera.PreviewCallback,
     SurfaceTextureHelper.OnTextureFrameAvailableListener {
   private final static String TAG = "VideoCapturerAndroid";
   private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
   private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 6000;
 
-  private Camera camera; // Only non-null while capturing.
+  private android.hardware.Camera camera; // Only non-null while capturing.
   private HandlerThread cameraThread;
   private final Handler cameraThreadHandler;
   private Context applicationContext;
   // Synchronization lock for |id|.
   private final Object cameraIdLock = new Object();
   private int id;
-  private Camera.CameraInfo info;
+  private android.hardware.Camera.CameraInfo info;
   private final FramePool videoBuffers;
   private final CameraStatistics cameraStatistics;
   // Remember the requested format in case we want to switch cameras.
@@ -108,10 +107,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
   private int openCameraAttempts;
 
   // Camera error callback.
-  private final Camera.ErrorCallback cameraErrorCallback =
-      new Camera.ErrorCallback() {
+  private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
+      new android.hardware.Camera.ErrorCallback() {
     @Override
-    public void onError(int error, Camera camera) {
+    public void onError(int error, android.hardware.Camera camera) {
       String errorMessage;
       if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
         errorMessage = "Camera server died!";
@@ -261,7 +260,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
   // Switch camera to the next valid camera id. This can only be called while
   // the camera is running.
   public void switchCamera(final CameraSwitchHandler handler) {
-    if (Camera.getNumberOfCameras() < 2) {
+    if (android.hardware.Camera.getNumberOfCameras() < 2) {
       if (handler != null) {
         handler.onCameraSwitchError("No camera to switch to.");
       }
@@ -292,7 +291,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
           pendingCameraSwitch = false;
         }
         if (handler != null) {
-          handler.onCameraSwitchDone(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
+          handler.onCameraSwitchDone(
+              info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
         }
       }
     });
@@ -375,13 +375,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
   // found. If |deviceName| is empty, the first available device is used.
   private static int lookupDeviceName(String deviceName) {
     Logging.d(TAG, "lookupDeviceName: " + deviceName);
-    if (deviceName == null || Camera.getNumberOfCameras() == 0) {
+    if (deviceName == null || android.hardware.Camera.getNumberOfCameras() == 0) {
       return -1;
     }
     if (deviceName.isEmpty()) {
       return 0;
     }
-    for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
       if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
         return i;
       }
@@ -461,9 +461,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
         if (eventsHandler != null) {
           eventsHandler.onCameraOpening(id);
         }
-        camera = Camera.open(id);
-        info = new Camera.CameraInfo();
-        Camera.getCameraInfo(id, info);
+        camera = android.hardware.Camera.open(id);
+        info = new android.hardware.Camera.CameraInfo();
+        android.hardware.Camera.getCameraInfo(id, info);
       }
     } catch (RuntimeException e) {
       openCameraAttempts++;
@@ -525,14 +525,15 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
     requestedFramerate = framerate;
 
     // Find closest supported format for |width| x |height| @ |framerate|.
-    final Camera.Parameters parameters = camera.getParameters();
+    final android.hardware.Camera.Parameters parameters = camera.getParameters();
     final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
-    final Camera.Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
-        parameters.getSupportedPreviewSizes(), width, height);
+    final android.hardware.Camera.Size previewSize =
+        CameraEnumerationAndroid.getClosestSupportedSize(
+            parameters.getSupportedPreviewSizes(), width, height);
     final CaptureFormat captureFormat = new CaptureFormat(
         previewSize.width, previewSize.height,
-        range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
-        range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
 
     // Check if we are already using this capture format, then we don't need to do anything.
     if (captureFormat.equals(this.captureFormat)) {
@@ -554,8 +555,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
     parameters.setPreviewFormat(captureFormat.imageFormat);
     // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
    // as a workaround for an aspect ratio problem on Nexus 7.
-    final Camera.Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
-        parameters.getSupportedPictureSizes(), width, height);
+    final android.hardware.Camera.Size pictureSize =
+        CameraEnumerationAndroid.getClosestSupportedSize(
+            parameters.getSupportedPictureSizes(), width, height);
     parameters.setPictureSize(pictureSize.width, pictureSize.height);
 
     // Temporarily stop preview if it's already running.
@@ -572,8 +574,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
     this.captureFormat = captureFormat;
 
     List<String> focusModes = parameters.getSupportedFocusModes();
-    if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
-      parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+    if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+      parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
     }
 
     camera.setParameters(parameters);
@@ -637,7 +639,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
     Logging.d(TAG, "switchCameraOnCameraThread");
     stopCaptureOnCameraThread();
     synchronized (cameraIdLock) {
-      id = (id + 1) % Camera.getNumberOfCameras();
+      id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
     }
     dropNextFrame = true;
     startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
@@ -699,7 +701,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
 
   private int getFrameOrientation() {
     int rotation = getDeviceOrientation();
-    if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
       rotation = 360 - rotation;
     }
     return (info.orientation + rotation) % 360;
@@ -707,7 +709,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
 
   // Called on cameraThread so must not "synchronized".
   @Override
-  public void onPreviewFrame(byte[] data, Camera callbackCamera) {
+  public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
     checkIsOnCameraThread();
     if (camera == null) {
       return;
@@ -752,7 +754,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
     }
 
     int rotation = getFrameOrientation();
-    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
       // Undo the mirror that the OS "helps" us with.
       // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
       transformMatrix =
@@ -784,7 +786,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
     // keeping the buffers alive and for finding the corresponding ByteBuffer given a timestamp.
     private final Map<Long, ByteBuffer> pendingBuffers = new HashMap<Long, ByteBuffer>();
     private int frameSize = 0;
-    private Camera camera;
+    private android.hardware.Camera camera;
 
     public FramePool(Thread thread) {
       this.thread = thread;
@@ -797,7 +799,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
     }
 
     // Discards previous queued buffers and adds new callback buffers to camera.
-    public void queueCameraBuffers(int frameSize, Camera camera) {
+    public void queueCameraBuffers(int frameSize, android.hardware.Camera camera) {
       checkIsOnValidThread();
       this.camera = camera;
       this.frameSize = frameSize;
MediaCodecVideoDecoder.java:

@@ -27,6 +27,7 @@
 
 package org.webrtc;
 
+import android.graphics.SurfaceTexture;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
@@ -48,6 +49,7 @@ import java.util.concurrent.TimeUnit;
 
 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
 // This class is an implementation detail of the Java PeerConnection API.
+@SuppressWarnings("deprecation")
 public class MediaCodecVideoDecoder {
   // This class is constructed, operated, and destroyed by its C++ incarnation,
   // so the class and its methods have non-public visibility. The API this
MediaCodecVideoEncoder.java:

@@ -27,6 +27,7 @@
 
 package org.webrtc;
 
+import android.annotation.TargetApi;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecInfo;
@@ -44,6 +45,8 @@ import java.util.concurrent.CountDownLatch;
 
 // Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
 // This class is an implementation detail of the Java PeerConnection API.
+@TargetApi(19)
+@SuppressWarnings("deprecation")
 public class MediaCodecVideoEncoder {
   // This class is constructed, operated, and destroyed by its C++ incarnation,
   // so the class and its methods have non-public visibility. The API this
@@ -227,6 +227,12 @@
         'libjingle_peerconnection_so',
       ],
       'variables': {
+        # Designate as Chromium code and point to our lint settings to
+        # enable linting of the WebRTC code (this is the only way to make
+        # lint_action invoke the Android linter).
+        'android_manifest_path': '<(webrtc_root)/build/android/AndroidManifest.xml',
+        'suppressions_file': '<(webrtc_root)/build/android/suppressions.xml',
+        'chromium_code': 1,
         'java_in_dir': 'app/webrtc/java',
         'webrtc_base_dir': '<(webrtc_root)/base',
         'webrtc_modules_dir': '<(webrtc_root)/modules',
14
webrtc/build/android/AndroidManifest.xml
Normal file
14
webrtc/build/android/AndroidManifest.xml
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<!--
|
||||||
|
This is a dummy manifest which is required by:
|
||||||
|
1. aapt when generating R.java in java.gypi:
|
||||||
|
Nothing in the manifest is used, but it is still required by aapt.
|
||||||
|
2. lint: [min|target]SdkVersion are required by lint and should
|
||||||
|
be kept up-to-date.
|
||||||
|
-->
|
||||||
|
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
||||||
|
package="dummy.package">
|
||||||
|
|
||||||
|
<uses-sdk android:minSdkVersion="16" android:targetSdkVersion="23" />
|
||||||
|
|
||||||
|
</manifest>
|
23
webrtc/build/android/suppressions.xml
Normal file
23
webrtc/build/android/suppressions.xml
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<lint>
|
||||||
|
<!-- These lint settings is for the Android linter that gets run by
|
||||||
|
lint_action.gypi on compile of WebRTC java code. All WebRTC java code
|
||||||
|
should lint cleanly for the issues below. -->
|
||||||
|
<!-- TODO(phoglund): make work with suppress.py or remove printout referring
|
||||||
|
to suppress.py. -->
|
||||||
|
<issue id="NewApi"></issue>
|
||||||
|
|
||||||
|
<issue id="Locale" severity="ignore"/>
|
||||||
|
<issue id="SdCardPath" severity="ignore"/>
|
||||||
|
<issue id="UseValueOf" severity="ignore"/>
|
||||||
|
<issue id="InlinedApi" severity="ignore"/>
|
||||||
|
<issue id="DefaultLocale" severity="ignore"/>
|
||||||
|
<issue id="Assert" severity="ignore"/>
|
||||||
|
<issue id="UseSparseArrays" severity="ignore"/>
|
||||||
|
|
||||||
|
<!-- These are just from the dummy AndroidManifest.xml we use for linting.
|
||||||
|
It's in the same directory as this file. -->
|
||||||
|
<issue id="MissingApplicationIcon" severity="ignore"/>
|
||||||
|
<issue id="AllowBackup" severity="ignore"/>
|
||||||
|
<issue id="MissingVersion" severity="ignore"/>
|
||||||
|
</lint>
|
@@ -7,7 +7,7 @@
   <uses-feature android:name="android.hardware.camera" />
   <uses-feature android:name="android.hardware.camera.autofocus" />
   <uses-feature android:glEsVersion="0x00020000" android:required="true" />
-  <uses-sdk android:minSdkVersion="14" android:targetSdkVersion="21" />
+  <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21" />
 
   <uses-permission android:name="android.permission.CAMERA" />
   <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
WebRtcAudioEffects.java:

@@ -10,6 +10,7 @@
 
 package org.webrtc.voiceengine;
 
+import android.annotation.TargetApi;
 import android.media.audiofx.AcousticEchoCanceler;
 import android.media.audiofx.AudioEffect;
 import android.media.audiofx.AudioEffect.Descriptor;
@@ -119,6 +120,7 @@ class WebRtcAudioEffects {
 
   // Returns true if the platform AEC should be excluded based on its UUID.
   // AudioEffect.queryEffects() can throw IllegalStateException.
+  @TargetApi(18)
   private static boolean isAcousticEchoCancelerExcludedByUUID() {
     for (Descriptor d : AudioEffect.queryEffects()) {
       if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC) &&
@@ -131,6 +133,7 @@ class WebRtcAudioEffects {
 
   // Returns true if the platform AGC should be excluded based on its UUID.
   // AudioEffect.queryEffects() can throw IllegalStateException.
+  @TargetApi(18)
   private static boolean isAutomaticGainControlExcludedByUUID() {
     for (Descriptor d : AudioEffect.queryEffects()) {
       if (d.type.equals(AudioEffect.EFFECT_TYPE_AGC) &&
@@ -143,6 +146,7 @@ class WebRtcAudioEffects {
 
   // Returns true if the platform NS should be excluded based on its UUID.
   // AudioEffect.queryEffects() can throw IllegalStateException.
+  @TargetApi(18)
   private static boolean isNoiseSuppressorExcludedByUUID() {
     for (Descriptor d : AudioEffect.queryEffects()) {
       if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) &&
@@ -368,7 +372,11 @@ class WebRtcAudioEffects {
   // AudioEffect.Descriptor array that are actually not available on the device.
   // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
   // AutomaticGainControl.isAvailable() returns false.
+  @TargetApi(18)
   private boolean effectTypeIsVoIP(UUID type) {
+    if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
+      return false;
+
     return (AudioEffect.EFFECT_TYPE_AEC.equals(type)
         && isAcousticEchoCancelerSupported())
         || (AudioEffect.EFFECT_TYPE_AGC.equals(type)
WebRtcAudioManager.java:

@@ -10,6 +10,7 @@
 
 package org.webrtc.voiceengine;
 
+import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioFormat;
@@ -189,20 +190,26 @@ public class WebRtcAudioManager {
     // No overrides available. Deliver best possible estimate based on default
     // Android AudioManager APIs.
     final int sampleRateHz;
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
-      sampleRateHz = WebRtcAudioUtils.getDefaultSampleRateHz();
+    if (WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
+      sampleRateHz = getSampleRateOnJellyBeanMR10OrHigher();
     } else {
-      String sampleRateString = audioManager.getProperty(
-          AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
-      sampleRateHz = (sampleRateString == null)
-          ? WebRtcAudioUtils.getDefaultSampleRateHz()
-          : Integer.parseInt(sampleRateString);
+      sampleRateHz = WebRtcAudioUtils.getDefaultSampleRateHz();
     }
     Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
     return sampleRateHz;
   }
 
+  @TargetApi(17)
+  private int getSampleRateOnJellyBeanMR10OrHigher() {
+    String sampleRateString = audioManager.getProperty(
+        AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+    return (sampleRateString == null)
+        ? WebRtcAudioUtils.getDefaultSampleRateHz()
+        : Integer.parseInt(sampleRateString);
+  }
+
   // Returns the native output buffer size for low-latency output streams.
+  @TargetApi(17)
   private int getLowLatencyOutputFramesPerBuffer() {
     assertTrue(isLowLatencyOutputSupported());
     if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
WebRtcAudioTrack.java:

@@ -13,6 +13,7 @@ package org.webrtc.voiceengine;
 import java.lang.Thread;
 import java.nio.ByteBuffer;
 
+import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioFormat;
 import android.media.AudioManager;
@@ -90,13 +91,9 @@ class WebRtcAudioTrack {
       assertTrue(sizeInBytes <= byteBuffer.remaining());
       int bytesWritten = 0;
       if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-        bytesWritten = audioTrack.write(byteBuffer,
-                                        sizeInBytes,
-                                        AudioTrack.WRITE_BLOCKING);
+        bytesWritten = writeOnLollipop(audioTrack, byteBuffer, sizeInBytes);
       } else {
-        bytesWritten = audioTrack.write(byteBuffer.array(),
-                                        byteBuffer.arrayOffset(),
-                                        sizeInBytes);
+        bytesWritten = writePreLollipop(audioTrack, byteBuffer, sizeInBytes);
       }
       if (bytesWritten != sizeInBytes) {
         Logging.e(TAG, "AudioTrack.write failed: " + bytesWritten);
@@ -123,6 +120,15 @@ class WebRtcAudioTrack {
       audioTrack.flush();
     }
 
+    @TargetApi(21)
+    private int writeOnLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+    }
+
+    private int writePreLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+    }
+
     public void joinThread() {
       keepAlive = false;
       while (isAlive()) {
@@ -224,16 +230,21 @@ class WebRtcAudioTrack {
   private boolean setStreamVolume(int volume) {
     Logging.d(TAG, "setStreamVolume(" + volume + ")");
     assertTrue(audioManager != null);
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      if (audioManager.isVolumeFixed()) {
-        Logging.e(TAG, "The device implements a fixed volume policy.");
-        return false;
-      }
+    if (isVolumeFixed()) {
+      Logging.e(TAG, "The device implements a fixed volume policy.");
+      return false;
     }
     audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
     return true;
   }
 
+  @TargetApi(21)
+  private boolean isVolumeFixed() {
+    if (!WebRtcAudioUtils.runningOnLollipopOrHigher())
+      return false;
+    return audioManager.isVolumeFixed();
+  }
+
   /** Get current volume level for a phone call audio stream. */
   private int getStreamVolume() {
     Logging.d(TAG, "getStreamVolume");
WebRtcAudioUtils.java:

@@ -144,6 +144,11 @@ public final class WebRtcAudioUtils {
     return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
   }
 
+  public static boolean runningOnJellyBeanMR2OrHigher() {
+    // July 24, 2013: Android 4.3. API Level 18.
+    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2;
+  }
+
   public static boolean runningOnLollipopOrHigher() {
     // API Level 21.
     return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;