Merge commit 'upstream-main' into master

Bug: 261600888
Test: none, build files to be updated in follow-up CL
Change-Id: Ib520938290c6bbdee4a9f73b6419b6c947a96ec4
Jorge E. Moreira
2022-12-06 16:34:41 -08:00
5393 changed files with 541103 additions and 211666 deletions

View File

@ -0,0 +1,20 @@
/*
* Copyright 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Interface to handle completion of addIceCandidate */
public interface AddIceObserver {
/** Called when ICE candidate added successfully.*/
@CalledByNative public void onAddSuccess();
/** Called when ICE candidate addition failed.*/
@CalledByNative public void onAddFailure(String error);
}
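
This observer pairs with the asynchronous observer-taking overload of PeerConnection.addIceCandidate that it was added for. A minimal caller-side sketch, assuming a live PeerConnection and a received IceCandidate:

import org.webrtc.AddIceObserver;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.PeerConnection;

class IceCandidateSink {
  // Adds a remote candidate and logs the asynchronous outcome.
  static void addRemoteCandidate(PeerConnection pc, IceCandidate candidate) {
    pc.addIceCandidate(candidate, new AddIceObserver() {
      @Override
      public void onAddSuccess() {
        Logging.d("IceCandidateSink", "Candidate added: " + candidate.sdp);
      }
      @Override
      public void onAddFailure(String error) {
        Logging.e("IceCandidateSink", "Candidate rejected: " + error);
      }
    });
  }
}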

View File

@ -28,7 +28,6 @@ public class Camera1Capturer extends CameraCapturer {
SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
int framerate) {
Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
framerate);
surfaceTextureHelper, cameraName, width, height, framerate);
}
}

View File

@ -11,7 +11,7 @@
package org.webrtc;
import android.os.SystemClock;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@ -158,7 +158,7 @@ public class Camera1Enumerator implements CameraEnumerator {
return ranges;
}
// Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException
// Returns the camera index for camera with name `deviceName`, or throws IllegalArgumentException
// if no such camera can be found.
static int getCameraIndex(String deviceName) {
Logging.d(TAG, "getCameraIndex: " + deviceName);

View File

@ -10,12 +10,10 @@
package org.webrtc;
import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
@TargetApi(21)
public class Camera2Capturer extends CameraCapturer {
private final Context context;
@Nullable private final CameraManager cameraManager;

View File

@ -10,26 +10,25 @@
package org.webrtc;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Build;
import android.os.SystemClock;
import android.support.annotation.Nullable;
import android.util.AndroidException;
import android.util.Range;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@TargetApi(21)
public class Camera2Enumerator implements CameraEnumerator {
private final static String TAG = "Camera2Enumerator";
private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
@ -51,11 +50,8 @@ public class Camera2Enumerator implements CameraEnumerator {
public String[] getDeviceNames() {
try {
return cameraManager.getCameraIdList();
// On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
} catch (/* CameraAccessException */ AndroidException e) {
Logging.e(TAG, "Camera access exception: " + e);
} catch (CameraAccessException e) {
Logging.e(TAG, "Camera access exception", e);
return new String[] {};
}
}
@ -93,11 +89,8 @@ public class Camera2Enumerator implements CameraEnumerator {
private @Nullable CameraCharacteristics getCameraCharacteristics(String deviceName) {
try {
return cameraManager.getCameraCharacteristics(deviceName);
// On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
} catch (/* CameraAccessException */ AndroidException e) {
Logging.e(TAG, "Camera access exception: " + e);
} catch (CameraAccessException | RuntimeException e) {
Logging.e(TAG, "Camera access exception", e);
return null;
}
}
@ -106,10 +99,6 @@ public class Camera2Enumerator implements CameraEnumerator {
* Checks if API is supported and all cameras have better than legacy support.
*/
public static boolean isSupported(Context context) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
return false;
}
CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
String[] cameraIds = cameraManager.getCameraIdList();
@ -120,11 +109,8 @@ public class Camera2Enumerator implements CameraEnumerator {
return false;
}
}
// On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
} catch (/* CameraAccessException */ AndroidException e) {
Logging.e(TAG, "Camera access exception: " + e);
} catch (CameraAccessException | RuntimeException e) {
Logging.e(TAG, "Failed to check if camera2 is supported", e);
return false;
}
return true;
@ -186,7 +172,7 @@ public class Camera2Enumerator implements CameraEnumerator {
try {
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
} catch (Exception ex) {
Logging.e(TAG, "getCameraCharacteristics(): " + ex);
Logging.e(TAG, "getCameraCharacteristics()", ex);
return new ArrayList<CaptureFormat>();
}
@ -230,7 +216,10 @@ public class Camera2Enumerator implements CameraEnumerator {
// Convert from android.util.Size to Size.
private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
final List<Size> sizes = new ArrayList<Size>();
if (cameraSizes == null || cameraSizes.length == 0) {
return Collections.emptyList();
}
final List<Size> sizes = new ArrayList<>(cameraSizes.length);
for (android.util.Size size : cameraSizes) {
sizes.add(new Size(size.getWidth(), size.getHeight()));
}
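
With the Lollipop floor removed from isSupported() above, enumerator selection reduces to the better-than-legacy check. A hedged sketch of the usual selection pattern; the captureToTexture flag value is an assumption:

import android.content.Context;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;

class EnumeratorPicker {
  // Prefer camera2 when every device camera offers better-than-LEGACY support.
  static CameraEnumerator pick(Context appContext) {
    if (Camera2Enumerator.isSupported(appContext)) {
      return new Camera2Enumerator(appContext);
    }
    return new Camera1Enumerator(/* captureToTexture= */ true);
  }
}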

View File

@ -152,24 +152,24 @@ public class CameraEnumerationAndroid {
}
}
// Prefer a fps range with an upper bound close to |framerate|. Also prefer a fps range with a low
// Prefer a fps range with an upper bound close to `framerate`. Also prefer a fps range with a low
// lower bound, to allow the framerate to fluctuate based on lighting conditions.
public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
return Collections.min(
supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
// Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
// Progressive penalty if the upper bound is further away than `MAX_FPS_DIFF_THRESHOLD`
// from requested.
private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
// Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
// Progressive penalty if the lower bound is bigger than `MIN_FPS_THRESHOLD`.
private static final int MIN_FPS_THRESHOLD = 8000;
private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
// Use one weight for small |value| less than |threshold|, and another weight above.
// Use one weight for small `value` less than `threshold`, and another weight above.
private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
return (value < threshold) ? value * lowWeight
: threshold * lowWeight + (value - threshold) * highWeight;
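
To make the weighting concrete, here is a standalone sketch of the same progressive-penalty scoring. The constants and progressivePenalty() are copied from the hunk above; the score() helper and demo values are illustrative, not the library's API. Framerates are in thousandths of fps, so 30000 means 30 fps:

public final class FrameratePenaltyDemo {
  private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
  private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
  private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
  private static final int MIN_FPS_THRESHOLD = 8000;
  private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
  private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;

  // Cheap penalty below `threshold`, steeper penalty above it.
  static int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
    return (value < threshold) ? value * lowWeight
                               : threshold * lowWeight + (value - threshold) * highWeight;
  }

  // Lower score = closer match to the requested framerate.
  static int score(int minFps, int maxFps, int requestedFps) {
    return progressivePenalty(Math.abs(requestedFps - maxFps), MAX_FPS_DIFF_THRESHOLD,
               MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT)
        + progressivePenalty(minFps, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT,
               MIN_FPS_HIGH_VALUE_WEIGHT);
  }

  public static void main(String[] args) {
    // Same upper bound, but [15, 30] fps beats [30, 30] fps for a 30 fps
    // request: the low minimum lets the framerate drop in poor lighting.
    System.out.println(score(15000, 30000, 30000)); // 8000*1 + 7000*4  = 36000
    System.out.println(score(30000, 30000, 30000)); // 8000*1 + 22000*4 = 96000
  }
}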

View File

@ -48,7 +48,7 @@ public interface CameraVideoCapturer extends VideoCapturer {
* The callback may be called on an arbitrary thread.
*/
public interface CameraSwitchHandler {
// Invoked on success. |isFrontCamera| is true if the new camera is front facing.
// Invoked on success. `isFrontCamera` is true if the new camera is front facing.
void onCameraSwitchDone(boolean isFrontCamera);
// Invoked on failure, e.g. camera is stopped or only one camera available.

View File

@ -20,12 +20,20 @@ public final class CandidatePairChangeEvent {
public final int lastDataReceivedMs;
public final String reason;
/**
* An estimate from the ICE stack on how long it was disconnected before
* changing to the new candidate pair in this event.
* The first time a candidate pair is signaled, the value will be 0.
*/
public final int estimatedDisconnectedTimeMs;
@CalledByNative
CandidatePairChangeEvent(
IceCandidate local, IceCandidate remote, int lastDataReceivedMs, String reason) {
CandidatePairChangeEvent(IceCandidate local, IceCandidate remote, int lastDataReceivedMs,
String reason, int estimatedDisconnectedTimeMs) {
this.local = local;
this.remote = remote;
this.lastDataReceivedMs = lastDataReceivedMs;
this.reason = reason;
this.estimatedDisconnectedTimeMs = estimatedDisconnectedTimeMs;
}
}
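
Clients receive this event through PeerConnection.Observer. A sketch of logging the new field, assuming the onSelectedCandidatePairChanged callback that delivers this event type:

// Inside a PeerConnection.Observer implementation:
@Override
public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) {
  Logging.d("PcObserver",
      "Pair changed, reason: " + event.reason
          + ", last data received " + event.lastDataReceivedMs + " ms ago"
          + ", estimated disconnected time " + event.estimatedDisconnectedTimeMs + " ms");
}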

View File

@ -29,7 +29,7 @@ public final class CryptoOptions {
private final boolean enableGcmCryptoSuites;
/**
* If set to true, the (potentially insecure) crypto cipher
* SRTP_AES128_CM_SHA1_32 will be included in the list of supported ciphers
* kSrtpAes128CmSha1_32 will be included in the list of supported ciphers
* during negotiation. It will only be used if both peers support it and no
* other ciphers get preferred.
*/

View File

@ -63,7 +63,7 @@ public class DataChannel {
public final ByteBuffer data;
/**
* Indicates whether |data| contains UTF-8 text or "binary data"
* Indicates whether `data` contains UTF-8 text or "binary data"
* (i.e. anything else).
*/
public final boolean binary;
@ -82,7 +82,7 @@ public class DataChannel {
/** The data channel state has changed. */
@CalledByNative("Observer") public void onStateChange();
/**
* A data buffer was successfully received. NOTE: |buffer.data| will be
* A data buffer was successfully received. NOTE: `buffer.data` will be
* freed once this function returns so callers who want to use the data
* asynchronously must make sure to copy it first.
*/
@ -110,7 +110,7 @@ public class DataChannel {
this.nativeDataChannel = nativeDataChannel;
}
/** Register |observer|, replacing any previously-registered observer. */
/** Register `observer`, replacing any previously-registered observer. */
public void registerObserver(Observer observer) {
checkDataChannelExists();
if (nativeObserver != 0) {
@ -123,6 +123,7 @@ public class DataChannel {
public void unregisterObserver() {
checkDataChannelExists();
nativeUnregisterObserver(nativeObserver);
nativeObserver = 0;
}
public String label() {
@ -156,7 +157,7 @@ public class DataChannel {
nativeClose();
}
/** Send |data| to the remote peer; return success. */
/** Send `data` to the remote peer; return success. */
public boolean send(Buffer buffer) {
checkDataChannelExists();
// TODO(fischman): this could be cleverer about avoiding copies if the
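
A minimal sketch tying the pieces above together, assuming `dc` is a channel obtained from PeerConnection.createDataChannel. Note the defensive copy that the onMessage() contract requires, since buffer.data is freed when the callback returns:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import org.webrtc.DataChannel;

class DataChannelDemo {
  static void wire(DataChannel dc) {
    dc.registerObserver(new DataChannel.Observer() {
      @Override public void onBufferedAmountChange(long previousAmount) {}
      @Override public void onStateChange() {}
      @Override
      public void onMessage(DataChannel.Buffer buffer) {
        // buffer.data is freed once this returns, so copy before keeping it.
        ByteBuffer copy = ByteBuffer.allocate(buffer.data.remaining());
        copy.put(buffer.data).flip();
      }
    });
    // Send a UTF-8 text message; binary=false marks it as text.
    ByteBuffer payload = ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8));
    boolean queued = dc.send(new DataChannel.Buffer(payload, /* binary= */ false));
  }
}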

View File

@ -0,0 +1,20 @@
/*
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class Dav1dDecoder extends WrappedNativeVideoDecoder {
@Override
public long createNativeVideoDecoder() {
return nativeCreateDecoder();
}
static native long nativeCreateDecoder();
}

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.LinkedHashSet;

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.LinkedHashSet;

View File

@ -11,8 +11,8 @@
package org.webrtc;
import android.graphics.SurfaceTexture;
import android.support.annotation.Nullable;
import android.view.Surface;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import javax.microedition.khronos.egl.EGL10;
@ -146,14 +146,12 @@ public interface EglBase {
}
/**
* Create a new context with the specified config attributes, sharing data with |sharedContext|.
* If |sharedContext| is null, a root context is created. This function will try to create an EGL
* 1.4 context if possible, and an EGL 1.0 context otherwise.
* Create a new context with the specified config attributes, sharing data with `sharedContext`.
* If `sharedContext` is null, a root EGL 1.4 context is created.
*/
public static EglBase create(@Nullable Context sharedContext, int[] configAttributes) {
if (sharedContext == null) {
return EglBase14Impl.isEGL14Supported() ? createEgl14(configAttributes)
: createEgl10(configAttributes);
return createEgl14(configAttributes);
} else if (sharedContext instanceof EglBase14.Context) {
return createEgl14((EglBase14.Context) sharedContext, configAttributes);
} else if (sharedContext instanceof EglBase10.Context) {
@ -171,7 +169,7 @@ public interface EglBase {
}
/**
* Helper function for creating a plain context, sharing data with |sharedContext|. This function
* Helper function for creating a plain context, sharing data with `sharedContext`. This function
* will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise.
*/
public static EglBase create(Context sharedContext) {
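
The sharing behavior described above is easiest to see in code; a sketch using the no-argument and context-sharing factory overloads from this interface:

// Root context: EGL 1.4 is now assumed, with no EGL 1.0 fallback.
EglBase rootEglBase = EglBase.create();
// Second context sharing textures/buffers with the root context.
EglBase sharedEglBase = EglBase.create(rootEglBase.getEglBaseContext());
// ... use the contexts ...
sharedEglBase.release();
rootEglBase.release();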

View File

@ -18,8 +18,8 @@ import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.support.annotation.Nullable;
import android.view.Surface;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import java.text.DecimalFormat;
import java.util.ArrayList;
@ -111,8 +111,8 @@ public class EglRenderer implements VideoSink {
protected final String name;
// |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
// on |handlerLock|.
// `renderThreadHandler` is a handler for communicating with `renderThread`, and is synchronized
// on `handlerLock`.
private final Object handlerLock = new Object();
@Nullable private Handler renderThreadHandler;
@ -136,11 +136,11 @@ public class EglRenderer implements VideoSink {
private boolean usePresentationTimeStamp;
private final Matrix drawMatrix = new Matrix();
// Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
// Pending frame to render. Serves as a queue with size 1. Synchronized on `frameLock`.
private final Object frameLock = new Object();
@Nullable private VideoFrame pendingFrame;
// These variables are synchronized on |layoutLock|.
// These variables are synchronized on `layoutLock`.
private final Object layoutLock = new Object();
private float layoutAspectRatio;
// If true, mirrors the video stream horizontally.
@ -148,7 +148,7 @@ public class EglRenderer implements VideoSink {
// If true, mirrors the video stream vertically.
private boolean mirrorVertically;
// These variables are synchronized on |statisticsLock|.
// These variables are synchronized on `statisticsLock`.
private final Object statisticsLock = new Object();
// Total number of video frames received in renderFrame() call.
private int framesReceived;
@ -198,9 +198,9 @@ public class EglRenderer implements VideoSink {
}
/**
* Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
* Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
* for drawing frames on the EGLSurface. This class is responsible for calling release() on
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
* `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
* init()/release() cycle. If usePresentationTimeStamp is true, eglPresentationTimeANDROID will be
* set with the frame timestamps, which specifies desired presentation time and might be useful
* for e.g. syncing audio and video.
@ -592,10 +592,10 @@ public class EglRenderer implements VideoSink {
}
/**
* Renders and releases |pendingFrame|.
* Renders and releases `pendingFrame`.
*/
private void renderFrameOnRenderThread() {
// Fetch and render |pendingFrame|.
// Fetch and render `pendingFrame`.
final VideoFrame frame;
synchronized (frameLock) {
if (pendingFrame == null) {
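
The `pendingFrame` handoff above is the classic size-1 queue: the producer replaces (and releases) any stale frame, and the render thread drains it. A condensed sketch of the pattern with hypothetical names, not the class's full logic:

private final Object frameLock = new Object();
private VideoFrame pendingFrame; // Queue of size 1.

// Producer side: keep only the newest frame, dropping (and releasing) stale ones.
void onFrame(VideoFrame frame) {
  synchronized (frameLock) {
    if (pendingFrame != null) {
      pendingFrame.release();
    }
    pendingFrame = frame;
    frame.retain();
  }
}

// Render-thread side: take ownership of the pending frame, if any.
VideoFrame takePendingFrame() {
  synchronized (frameLock) {
    VideoFrame frame = pendingFrame;
    pendingFrame = null;
    return frame; // Caller must release() after rendering.
  }
}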

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
@ -54,7 +54,6 @@ public class EncodedImage implements RefCounted {
public final long captureTimeNs;
public final FrameType frameType;
public final int rotation;
public final boolean completeFrame;
public final @Nullable Integer qp;
// TODO(bugs.webrtc.org/9378): Use retain and release from jni code.
@ -71,7 +70,7 @@ public class EncodedImage implements RefCounted {
@CalledByNative
private EncodedImage(ByteBuffer buffer, @Nullable Runnable releaseCallback, int encodedWidth,
int encodedHeight, long captureTimeNs, FrameType frameType, int rotation,
boolean completeFrame, @Nullable Integer qp) {
@Nullable Integer qp) {
this.buffer = buffer;
this.encodedWidth = encodedWidth;
this.encodedHeight = encodedHeight;
@ -79,7 +78,6 @@ public class EncodedImage implements RefCounted {
this.captureTimeNs = captureTimeNs;
this.frameType = frameType;
this.rotation = rotation;
this.completeFrame = completeFrame;
this.qp = qp;
this.refCountDelegate = new RefCountDelegate(releaseCallback);
}
@ -114,11 +112,6 @@ public class EncodedImage implements RefCounted {
return rotation;
}
@CalledByNative
private boolean getCompleteFrame() {
return completeFrame;
}
@CalledByNative
private @Nullable Integer getQp() {
return qp;
@ -136,7 +129,6 @@ public class EncodedImage implements RefCounted {
private long captureTimeNs;
private EncodedImage.FrameType frameType;
private int rotation;
private boolean completeFrame;
private @Nullable Integer qp;
private Builder() {}
@ -178,11 +170,6 @@ public class EncodedImage implements RefCounted {
return this;
}
public Builder setCompleteFrame(boolean completeFrame) {
this.completeFrame = completeFrame;
return this;
}
public Builder setQp(@Nullable Integer qp) {
this.qp = qp;
return this;
@ -190,7 +177,7 @@ public class EncodedImage implements RefCounted {
public EncodedImage createEncodedImage() {
return new EncodedImage(buffer, releaseCallback, encodedWidth, encodedHeight, captureTimeNs,
frameType, rotation, completeFrame, qp);
frameType, rotation, qp);
}
}
}
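
After this change, callers simply drop setCompleteFrame(...) from the builder chain. A hedged construction sketch; the field values are illustrative:

import java.nio.ByteBuffer;
import org.webrtc.EncodedImage;

class EncodedImageFactoryDemo {
  static EncodedImage keyFrame() {
    ByteBuffer encoded = ByteBuffer.allocateDirect(4096); // illustrative payload
    return EncodedImage.builder()
        .setBuffer(encoded, /* releaseCallback= */ null)
        .setEncodedWidth(640)
        .setEncodedHeight(480)
        .setCaptureTimeNs(System.nanoTime())
        .setFrameType(EncodedImage.FrameType.VideoFrameKey)
        .setRotation(0)
        .setQp(null)
        .createEncodedImage();
  }
}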

View File

@ -78,16 +78,16 @@ public class GlShader {
}
/**
* Enable and upload a vertex array for attribute |label|. The vertex data is specified in
* |buffer| with |dimension| number of components per vertex.
* Enable and upload a vertex array for attribute `label`. The vertex data is specified in
* `buffer` with `dimension` number of components per vertex.
*/
public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
setVertexAttribArray(label, dimension, 0 /* stride */, buffer);
}
/**
* Enable and upload a vertex array for attribute |label|. The vertex data is specified in
* |buffer| with |dimension| number of components per vertex and specified |stride|.
* Enable and upload a vertex array for attribute `label`. The vertex data is specified in
* `buffer` with `dimension` number of components per vertex and specified `stride`.
*/
public void setVertexAttribArray(String label, int dimension, int stride, FloatBuffer buffer) {
if (program == -1) {
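
A sketch of feeding a full-screen quad to an attribute through the two-argument overload above; the attribute name "in_pos" is hypothetical and must match the linked shader:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import org.webrtc.GlShader;

class QuadUploader {
  // Uploads four 2-component vertices (a full-screen triangle strip) to "in_pos".
  static void uploadQuad(GlShader shader) {
    FloatBuffer quad =
        ByteBuffer.allocateDirect(8 * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
    quad.put(new float[] {-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f});
    quad.rewind();
    shader.useProgram();
    shader.setVertexAttribArray("in_pos", /* dimension= */ 2, quad);
  }
}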

View File

@ -11,7 +11,7 @@
package org.webrtc;
import android.opengl.GLES20;
import android.opengl.GLException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
@ -22,9 +22,9 @@ import java.nio.FloatBuffer;
public class GlUtil {
private GlUtil() {}
public static class GlOutOfMemoryException extends RuntimeException {
public GlOutOfMemoryException(String msg) {
super(msg);
public static class GlOutOfMemoryException extends GLException {
public GlOutOfMemoryException(int error, String msg) {
super(error, msg);
}
}
@ -33,8 +33,8 @@ public class GlUtil {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
throw error == GLES20.GL_OUT_OF_MEMORY
? new GlOutOfMemoryException(msg)
: new RuntimeException(msg + ": GLES20 error: " + error);
? new GlOutOfMemoryException(error, msg)
: new GLException(error, msg + ": GLES20 error: " + error);
}
}
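
Because GlOutOfMemoryException now extends GLException, callers can branch on out-of-memory specifically while still catching all GL errors by the base type. A sketch; the eviction hook is a hypothetical recovery strategy:

import android.opengl.GLException;
import org.webrtc.GlUtil;

class GlErrorHandling {
  static void checkedGlWork(Runnable glWork, Runnable evictTextureCache) {
    glWork.run();
    try {
      GlUtil.checkNoGLES2Error("after GL work");
    } catch (GlUtil.GlOutOfMemoryException e) {
      // GL_OUT_OF_MEMORY is now distinguishable by type; try to shed GPU memory.
      evictTextureCache.run();
    } catch (GLException e) {
      // Any other GL error; the raw error code rides along in the exception.
      throw new RuntimeException(e);
    }
  }
}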

View File

@ -11,7 +11,7 @@
package org.webrtc;
import android.media.MediaCodecInfo;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.Arrays;
/** Factory for Android hardware VideoDecoders. */

View File

@ -17,7 +17,7 @@ import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@ -27,6 +27,11 @@ import java.util.List;
public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
private static final String TAG = "HardwareVideoEncoderFactory";
// We don't need periodic keyframes. But some HW encoders, Exynos in particular, fail to
// initialize with the value -1, which should disable periodic keyframes according to the spec.
// Set it to 1 hour.
private static final int PERIODIC_KEY_FRAME_INTERVAL_S = 3600;
// Forced key frame interval - used to reduce color distortions on Qualcomm platforms.
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000;
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
@ -89,12 +94,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
@Nullable
@Override
public VideoEncoder createEncoder(VideoCodecInfo input) {
// HW encoding is not supported below Android Kitkat.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null;
}
VideoCodecMimeType type = VideoCodecMimeType.valueOf(input.name);
VideoCodecMimeType type = VideoCodecMimeType.valueOf(input.getName());
MediaCodecInfo info = findCodecForType(type);
if (info == null) {
@ -123,23 +123,18 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
}
return new HardwareVideoEncoder(new MediaCodecWrapperFactoryImpl(), codecName, type,
surfaceColorFormat, yuvColorFormat, input.params, getKeyFrameIntervalSec(type),
surfaceColorFormat, yuvColorFormat, input.params, PERIODIC_KEY_FRAME_INTERVAL_S,
getForcedKeyFrameIntervalMs(type, codecName), createBitrateAdjuster(type, codecName),
sharedContext);
}
@Override
public VideoCodecInfo[] getSupportedCodecs() {
// HW encoding is not supported below Android Kitkat.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return new VideoCodecInfo[0];
}
List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
// Generate a list of supported codecs in order of preference:
// VP8, VP9, H264 (high profile), and H264 (baseline profile).
for (VideoCodecMimeType type : new VideoCodecMimeType[] {
VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, VideoCodecMimeType.H264}) {
// VP8, VP9, H264 (high profile), H264 (baseline profile) and AV1.
for (VideoCodecMimeType type : new VideoCodecMimeType[] {VideoCodecMimeType.VP8,
VideoCodecMimeType.VP9, VideoCodecMimeType.H264, VideoCodecMimeType.AV1}) {
MediaCodecInfo codec = findCodecForType(type);
if (codec != null) {
String name = type.name();
@ -195,6 +190,10 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
// Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the
// current SDK.
private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
return info.isHardwareAccelerated();
}
switch (type) {
case VP8:
return isHardwareSupportedInCurrentSdkVp8(info);
@ -202,19 +201,20 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
return isHardwareSupportedInCurrentSdkVp9(info);
case H264:
return isHardwareSupportedInCurrentSdkH264(info);
case AV1:
return false;
}
return false;
}
private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
String name = info.getName();
// QCOM Vp8 encoder is supported in KITKAT or later.
return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
// QCOM Vp8 encoder is always supported.
return name.startsWith(QCOM_PREFIX)
// Exynos VP8 encoder is supported in M or later.
|| (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
// Intel Vp8 encoder is supported in LOLLIPOP or later, with the intel encoder enabled.
|| (name.startsWith(INTEL_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
&& enableIntelVp8Encoder);
// Intel Vp8 encoder is always supported, with the intel encoder enabled.
|| (name.startsWith(INTEL_PREFIX) && enableIntelVp8Encoder);
}
private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
@ -230,11 +230,8 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
return false;
}
String name = info.getName();
// QCOM H264 encoder is supported in KITKAT or later.
return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
// Exynos H264 encoder is supported in LOLLIPOP or later.
|| (name.startsWith(EXYNOS_PREFIX)
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP);
// QCOM and Exynos H264 encoders are always supported.
return name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX);
}
private boolean isMediaCodecAllowed(MediaCodecInfo info) {
@ -244,27 +241,15 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
return codecAllowedPredicate.test(info);
}
private int getKeyFrameIntervalSec(VideoCodecMimeType type) {
switch (type) {
case VP8: // Fallthrough intended.
case VP9:
return 100;
case H264:
return 20;
}
throw new IllegalArgumentException("Unsupported VideoCodecMimeType " + type);
}
private int getForcedKeyFrameIntervalMs(VideoCodecMimeType type, String codecName) {
if (type == VideoCodecMimeType.VP8 && codecName.startsWith(QCOM_PREFIX)) {
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP
|| Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
} else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
} else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
}
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
}
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
}
// Other codecs don't need key frame forcing.
return 0;

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.Arrays;
import org.webrtc.PeerConnection;

View File

@ -0,0 +1,43 @@
/*
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public final class IceCandidateErrorEvent {
/** The local IP address used to communicate with the STUN or TURN server. */
public final String address;
/** The port used to communicate with the STUN or TURN server. */
public final int port;
/**
* The STUN or TURN URL that identifies the STUN or TURN server for which the failure occurred.
*/
public final String url;
/**
* The numeric STUN error code returned by the STUN or TURN server. If no host candidate can reach
* the server, errorCode will be set to the value 701, which is outside the STUN error code range.
* This error is only fired once per server URL while in the RTCIceGatheringState of "gathering".
*/
public final int errorCode;
/**
* The STUN reason text returned by the STUN or TURN server. If the server could not be reached,
* errorText will be set to an implementation-specific value providing details about the error.
*/
public final String errorText;
@CalledByNative
public IceCandidateErrorEvent(
String address, int port, String url, int errorCode, String errorText) {
this.address = address;
this.port = port;
this.url = url;
this.errorCode = errorCode;
this.errorText = errorText;
}
}
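
The event is delivered through PeerConnection.Observer; a sketch assuming the matching onIceCandidateError callback from the same API surface:

// Inside a PeerConnection.Observer implementation:
@Override
public void onIceCandidateError(IceCandidateErrorEvent event) {
  Logging.w("PcObserver",
      "ICE error " + event.errorCode + " from " + event.url + " via "
          + event.address + ":" + event.port + " - " + event.errorText);
}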

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import org.webrtc.VideoFrame.I420Buffer;

View File

@ -0,0 +1,25 @@
/*
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class LibaomAv1Encoder extends WrappedNativeVideoEncoder {
@Override
public long createNativeVideoEncoder() {
return nativeCreateEncoder();
}
static native long nativeCreateEncoder();
@Override
public boolean isHardwareEncoder() {
return false;
}
}

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
/** Java wrapper for a C++ MediaStreamTrackInterface. */
public class MediaStreamTrack {

View File

@ -18,12 +18,12 @@ import java.util.Map;
// Rtc histograms can be queried through the API, getAndReset().
// The returned map holds the name of a histogram and its samples.
//
// Example of |map| with one histogram:
// |name|: "WebRTC.Video.InputFramesPerSecond"
// |min|: 1
// |max|: 100
// |bucketCount|: 50
// |samples|: [30]:1
// Example of `map` with one histogram:
// `name`: "WebRTC.Video.InputFramesPerSecond"
// `min`: 1
// `max`: 100
// `bucketCount`: 50
// `samples`: [30]:1
//
// Most histograms are not updated frequently (e.g. most video metrics are an
// average over the call and recorded when a stream is removed).
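
A sketch of querying the histograms described above, assuming Metrics.getAndReset() exposes the name-to-HistogramInfo map and the fields listed in the example:

import java.util.Map;
import org.webrtc.Logging;
import org.webrtc.Metrics;

class MetricsDump {
  static void dump() {
    Map<String, Metrics.HistogramInfo> histograms = Metrics.getAndReset().map;
    Metrics.HistogramInfo fps = histograms.get("WebRTC.Video.InputFramesPerSecond");
    if (fps != null) {
      // `samples` maps bucket value -> count, e.g. {30: 1} in the example above.
      Logging.d("MetricsDump",
          "fps histogram min=" + fps.min + " max=" + fps.max + " samples=" + fps.samples);
    }
  }
}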

View File

@ -0,0 +1,122 @@
/*
* Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.List;
/** Interface for detecting network changes */
public interface NetworkChangeDetector {
// Java equivalent of C++ android_network_monitor.h / NetworkType.
public static enum ConnectionType {
CONNECTION_UNKNOWN,
CONNECTION_ETHERNET,
CONNECTION_WIFI,
CONNECTION_5G,
CONNECTION_4G,
CONNECTION_3G,
CONNECTION_2G,
CONNECTION_UNKNOWN_CELLULAR,
CONNECTION_BLUETOOTH,
CONNECTION_VPN,
CONNECTION_NONE
}
public static class IPAddress {
public final byte[] address;
public IPAddress(byte[] address) {
this.address = address;
}
@CalledByNative("IPAddress")
private byte[] getAddress() {
return address;
}
}
/** Java version of NetworkMonitor.NetworkInformation */
public static class NetworkInformation {
public final String name;
public final ConnectionType type;
// Used to specify the underlying network type if the type is CONNECTION_VPN.
public final ConnectionType underlyingTypeForVpn;
public final long handle;
public final IPAddress[] ipAddresses;
public NetworkInformation(String name, ConnectionType type, ConnectionType underlyingTypeForVpn,
long handle, IPAddress[] addresses) {
this.name = name;
this.type = type;
this.underlyingTypeForVpn = underlyingTypeForVpn;
this.handle = handle;
this.ipAddresses = addresses;
}
@CalledByNative("NetworkInformation")
private IPAddress[] getIpAddresses() {
return ipAddresses;
}
@CalledByNative("NetworkInformation")
private ConnectionType getConnectionType() {
return type;
}
@CalledByNative("NetworkInformation")
private ConnectionType getUnderlyingConnectionTypeForVpn() {
return underlyingTypeForVpn;
}
@CalledByNative("NetworkInformation")
private long getHandle() {
return handle;
}
@CalledByNative("NetworkInformation")
private String getName() {
return name;
}
};
/** Observer interface by which observer is notified of network changes. */
public static abstract class Observer {
/** Called when default network changes. */
public abstract void onConnectionTypeChanged(ConnectionType newConnectionType);
public abstract void onNetworkConnect(NetworkInformation networkInfo);
public abstract void onNetworkDisconnect(long networkHandle);
/**
* Called when the network preference changes for a (list of) connection type(s), e.g. when
* WIFI becomes `NOT_PREFERRED` or `NEUTRAL`.
*
* <p>Note: `types` is a list of ConnectionTypes, so that all cellular types can be modified in
* one call.
*/
public abstract void onNetworkPreference(
List<ConnectionType> types, @NetworkPreference int preference);
// Add default impl. for down-stream tests.
public String getFieldTrialsString() {
return "";
}
}
public ConnectionType getCurrentConnectionType();
public boolean supportNetworkCallback();
@Nullable public List<NetworkInformation> getActiveNetworkList();
public void destroy();
}

View File

@ -0,0 +1,17 @@
/*
* Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
public interface NetworkChangeDetectorFactory {
public NetworkChangeDetector create(NetworkChangeDetector.Observer observer, Context context);
}
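
Together with NetworkMonitor.setNetworkChangeDetectorFactory(...) (shown in the NetworkMonitor diff below), this factory lets downstream apps swap in their own detector. A sketch, where MyDetector is a hypothetical NetworkChangeDetector implementation:

import android.content.Context;
import org.webrtc.NetworkChangeDetector;
import org.webrtc.NetworkChangeDetectorFactory;
import org.webrtc.NetworkMonitor;

class CustomDetectorInstaller {
  // Must run before NetworkMonitor.startMonitoring(...) is first called.
  static void install() {
    NetworkMonitor.getInstance().setNetworkChangeDetectorFactory(
        new NetworkChangeDetectorFactory() {
          @Override
          public NetworkChangeDetector create(
              NetworkChangeDetector.Observer observer, Context context) {
            return new MyDetector(observer, context); // hypothetical implementation
          }
        });
  }
}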

View File

@ -10,14 +10,12 @@
package org.webrtc;
import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
import android.content.Context;
import android.os.Build;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.NetworkMonitorAutoDetect;
import org.webrtc.NetworkChangeDetector;
/**
* Borrowed from Chromium's
@ -32,7 +30,7 @@ public class NetworkMonitor {
* Alerted when the connection type of the network changes. The alert is fired on the UI thread.
*/
public interface NetworkObserver {
public void onConnectionTypeChanged(NetworkMonitorAutoDetect.ConnectionType connectionType);
public void onConnectionTypeChanged(NetworkChangeDetector.ConnectionType connectionType);
}
private static final String TAG = "NetworkMonitor";
@ -43,24 +41,43 @@ public class NetworkMonitor {
static final NetworkMonitor instance = new NetworkMonitor();
}
// Factory for creating NetworkChangeDetector.
private NetworkChangeDetectorFactory networkChangeDetectorFactory =
new NetworkChangeDetectorFactory() {
@Override
public NetworkChangeDetector create(
NetworkChangeDetector.Observer observer, Context context) {
return new NetworkMonitorAutoDetect(observer, context);
}
};
// Native observers of the connection type changes.
private final ArrayList<Long> nativeNetworkObservers;
// Java observers of the connection type changes.
private final ArrayList<NetworkObserver> networkObservers;
private final Object autoDetectLock = new Object();
private final Object networkChangeDetectorLock = new Object();
// Object that detects the connection type changes and brings up mobile networks.
@Nullable private NetworkMonitorAutoDetect autoDetect;
@Nullable private NetworkChangeDetector networkChangeDetector;
// Also guarded by autoDetectLock.
private int numObservers;
private volatile NetworkMonitorAutoDetect.ConnectionType currentConnectionType;
private volatile NetworkChangeDetector.ConnectionType currentConnectionType;
private NetworkMonitor() {
nativeNetworkObservers = new ArrayList<Long>();
networkObservers = new ArrayList<NetworkObserver>();
numObservers = 0;
currentConnectionType = NetworkMonitorAutoDetect.ConnectionType.CONNECTION_UNKNOWN;
currentConnectionType = NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN;
}
/**
* Set the factory that will be used to create the network change detector.
* Needs to be called before monitoring starts.
*/
public void setNetworkChangeDetectorFactory(NetworkChangeDetectorFactory factory) {
assertIsTrue(numObservers == 0);
this.networkChangeDetectorFactory = factory;
}
// TODO(sakal): Remove once downstream dependencies have been updated.
@ -84,21 +101,26 @@ public class NetworkMonitor {
* multi-networking. This requires the embedding app have the platform ACCESS_NETWORK_STATE and
* CHANGE_NETWORK_STATE permission.
*/
public void startMonitoring(Context applicationContext) {
synchronized (autoDetectLock) {
public void startMonitoring(Context applicationContext, String fieldTrialsString) {
synchronized (networkChangeDetectorLock) {
++numObservers;
if (autoDetect == null) {
autoDetect = createAutoDetect(applicationContext);
if (networkChangeDetector == null) {
networkChangeDetector = createNetworkChangeDetector(applicationContext, fieldTrialsString);
}
currentConnectionType =
NetworkMonitorAutoDetect.getConnectionType(autoDetect.getCurrentNetworkState());
currentConnectionType = networkChangeDetector.getCurrentConnectionType();
}
}
/** Deprecated, use startMonitoring with the fieldTrialsString argument. */
@Deprecated
public void startMonitoring(Context applicationContext) {
startMonitoring(applicationContext, "");
}
/** Deprecated, pass in application context in startMonitoring instead. */
@Deprecated
public void startMonitoring() {
startMonitoring(ContextUtils.getApplicationContext());
startMonitoring(ContextUtils.getApplicationContext(), "");
}
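
Call-site impact of the new overload, sketched below; the field-trial string follows the key:true format parsed by checkFieldTrial() later in this diff:

import android.content.Context;
import org.webrtc.NetworkMonitor;

class MonitoringStarter {
  static void start(Context appContext) {
    // New overload: pass field trials explicitly.
    NetworkMonitor.getInstance().startMonitoring(appContext, "getAllNetworksFromCache:true");
    // The deprecated overload still compiles and forwards an empty string:
    // NetworkMonitor.getInstance().startMonitoring(appContext);
  }
}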
/**
@ -107,27 +129,35 @@ public class NetworkMonitor {
* CHANGE_NETWORK_STATE permission.
*/
@CalledByNative
private void startMonitoring(@Nullable Context applicationContext, long nativeObserver) {
Logging.d(TAG, "Start monitoring with native observer " + nativeObserver);
private void startMonitoring(
@Nullable Context applicationContext, long nativeObserver, String fieldTrialsString) {
Logging.d(TAG,
"Start monitoring with native observer " + nativeObserver
+ " fieldTrialsString: " + fieldTrialsString);
startMonitoring(
applicationContext != null ? applicationContext : ContextUtils.getApplicationContext());
// The native observers expect a network list update after they call startMonitoring.
applicationContext != null ? applicationContext : ContextUtils.getApplicationContext(),
fieldTrialsString);
synchronized (nativeNetworkObservers) {
nativeNetworkObservers.add(nativeObserver);
}
// The native observer expects a network list update after startMonitoring.
updateObserverActiveNetworkList(nativeObserver);
// currentConnectionType was updated in startMonitoring().
// Need to notify the native observers here.
notifyObserversOfConnectionTypeChange(currentConnectionType);
}
/** Stop network monitoring. If no one is monitoring networks, destroy and reset autoDetect. */
/**
* Stop network monitoring. If no one is monitoring networks, destroy and reset
* networkChangeDetector.
*/
public void stopMonitoring() {
synchronized (autoDetectLock) {
synchronized (networkChangeDetectorLock) {
if (--numObservers == 0) {
autoDetect.destroy();
autoDetect = null;
networkChangeDetector.destroy();
networkChangeDetector = null;
}
}
}
@ -144,8 +174,8 @@ public class NetworkMonitor {
// Returns true if network binding is supported on this platform.
@CalledByNative
private boolean networkBindingSupported() {
synchronized (autoDetectLock) {
return autoDetect != null && autoDetect.supportNetworkCallback();
synchronized (networkChangeDetectorLock) {
return networkChangeDetector != null && networkChangeDetector.supportNetworkCallback();
}
}
@ -154,27 +184,20 @@ public class NetworkMonitor {
return Build.VERSION.SDK_INT;
}
private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
private NetworkChangeDetector.ConnectionType getCurrentConnectionType() {
return currentConnectionType;
}
private long getCurrentDefaultNetId() {
synchronized (autoDetectLock) {
return autoDetect == null ? INVALID_NET_ID : autoDetect.getDefaultNetId();
}
}
private NetworkMonitorAutoDetect createAutoDetect(Context appContext) {
return new NetworkMonitorAutoDetect(new NetworkMonitorAutoDetect.Observer() {
private NetworkChangeDetector createNetworkChangeDetector(
Context appContext, String fieldTrialsString) {
return networkChangeDetectorFactory.create(new NetworkChangeDetector.Observer() {
@Override
public void onConnectionTypeChanged(
NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
public void onConnectionTypeChanged(NetworkChangeDetector.ConnectionType newConnectionType) {
updateCurrentConnectionType(newConnectionType);
}
@Override
public void onNetworkConnect(NetworkMonitorAutoDetect.NetworkInformation networkInfo) {
public void onNetworkConnect(NetworkChangeDetector.NetworkInformation networkInfo) {
notifyObserversOfNetworkConnect(networkInfo);
}
@ -182,18 +205,28 @@ public class NetworkMonitor {
public void onNetworkDisconnect(long networkHandle) {
notifyObserversOfNetworkDisconnect(networkHandle);
}
@Override
public void onNetworkPreference(
List<NetworkChangeDetector.ConnectionType> types, int preference) {
notifyObserversOfNetworkPreference(types, preference);
}
@Override
public String getFieldTrialsString() {
return fieldTrialsString;
}
}, appContext);
}
private void updateCurrentConnectionType(
NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
private void updateCurrentConnectionType(NetworkChangeDetector.ConnectionType newConnectionType) {
currentConnectionType = newConnectionType;
notifyObserversOfConnectionTypeChange(newConnectionType);
}
/** Alerts all observers of a connection change. */
private void notifyObserversOfConnectionTypeChange(
NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
NetworkChangeDetector.ConnectionType newConnectionType) {
List<Long> nativeObservers = getNativeNetworkObserversSync();
for (Long nativeObserver : nativeObservers) {
nativeNotifyConnectionTypeChanged(nativeObserver);
@ -209,7 +242,7 @@ public class NetworkMonitor {
}
private void notifyObserversOfNetworkConnect(
NetworkMonitorAutoDetect.NetworkInformation networkInfo) {
NetworkChangeDetector.NetworkInformation networkInfo) {
List<Long> nativeObservers = getNativeNetworkObserversSync();
for (Long nativeObserver : nativeObservers) {
nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
@ -223,17 +256,28 @@ public class NetworkMonitor {
}
}
private void updateObserverActiveNetworkList(long nativeObserver) {
List<NetworkMonitorAutoDetect.NetworkInformation> networkInfoList;
synchronized (autoDetectLock) {
networkInfoList = (autoDetect == null) ? null : autoDetect.getActiveNetworkList();
private void notifyObserversOfNetworkPreference(
List<NetworkChangeDetector.ConnectionType> types, int preference) {
List<Long> nativeObservers = getNativeNetworkObserversSync();
for (NetworkChangeDetector.ConnectionType type : types) {
for (Long nativeObserver : nativeObservers) {
nativeNotifyOfNetworkPreference(nativeObserver, type, preference);
}
}
if (networkInfoList == null || networkInfoList.size() == 0) {
}
private void updateObserverActiveNetworkList(long nativeObserver) {
List<NetworkChangeDetector.NetworkInformation> networkInfoList;
synchronized (networkChangeDetectorLock) {
networkInfoList =
(networkChangeDetector == null) ? null : networkChangeDetector.getActiveNetworkList();
}
if (networkInfoList == null) {
return;
}
NetworkMonitorAutoDetect.NetworkInformation[] networkInfos =
new NetworkMonitorAutoDetect.NetworkInformation[networkInfoList.size()];
NetworkChangeDetector.NetworkInformation[] networkInfos =
new NetworkChangeDetector.NetworkInformation[networkInfoList.size()];
networkInfos = networkInfoList.toArray(networkInfos);
nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
}
@ -278,38 +322,46 @@ public class NetworkMonitor {
/** Checks if there currently is connectivity. */
public static boolean isOnline() {
NetworkMonitorAutoDetect.ConnectionType connectionType =
getInstance().getCurrentConnectionType();
return connectionType != NetworkMonitorAutoDetect.ConnectionType.CONNECTION_NONE;
NetworkChangeDetector.ConnectionType connectionType = getInstance().getCurrentConnectionType();
return connectionType != NetworkChangeDetector.ConnectionType.CONNECTION_NONE;
}
private native void nativeNotifyConnectionTypeChanged(long nativeAndroidNetworkMonitor);
private native void nativeNotifyOfNetworkConnect(
long nativeAndroidNetworkMonitor, NetworkMonitorAutoDetect.NetworkInformation networkInfo);
long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation networkInfo);
private native void nativeNotifyOfNetworkDisconnect(
long nativeAndroidNetworkMonitor, long networkHandle);
private native void nativeNotifyOfActiveNetworkList(
long nativeAndroidNetworkMonitor, NetworkMonitorAutoDetect.NetworkInformation[] networkInfos);
long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation[] networkInfos);
private native void nativeNotifyOfNetworkPreference(
long nativeAndroidNetworkMonitor, NetworkChangeDetector.ConnectionType type, int preference);
// For testing only.
@Nullable
NetworkMonitorAutoDetect getNetworkMonitorAutoDetect() {
synchronized (autoDetectLock) {
return autoDetect;
NetworkChangeDetector getNetworkChangeDetector() {
synchronized (networkChangeDetectorLock) {
return networkChangeDetector;
}
}
// For testing only.
int getNumObservers() {
synchronized (autoDetectLock) {
synchronized (networkChangeDetectorLock) {
return numObservers;
}
}
// For testing only.
static NetworkMonitorAutoDetect createAndSetAutoDetectForTest(Context context) {
static NetworkMonitorAutoDetect createAndSetAutoDetectForTest(
Context context, String fieldTrialsString) {
NetworkMonitor networkMonitor = getInstance();
NetworkMonitorAutoDetect autoDetect = networkMonitor.createAutoDetect(context);
return networkMonitor.autoDetect = autoDetect;
NetworkChangeDetector networkChangeDetector =
networkMonitor.createNetworkChangeDetector(context, fieldTrialsString);
networkMonitor.networkChangeDetector = networkChangeDetector;
return (NetworkMonitorAutoDetect) networkChangeDetector;
}
}

View File

@ -28,93 +28,28 @@ import android.net.wifi.WifiManager;
import android.net.wifi.p2p.WifiP2pGroup;
import android.net.wifi.p2p.WifiP2pManager;
import android.os.Build;
import android.support.annotation.Nullable;
import android.telephony.TelephonyManager;
import androidx.annotation.GuardedBy;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Borrowed from Chromium's
* src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
*
* Used by the NetworkMonitor to listen to platform changes in connectivity.
* Note that use of this class requires that the app have the platform
* ACCESS_NETWORK_STATE permission.
* <p>Used by the NetworkMonitor to listen to platform changes in connectivity. Note that use of
* this class requires that the app have the platform ACCESS_NETWORK_STATE permission.
*/
public class NetworkMonitorAutoDetect extends BroadcastReceiver {
public static enum ConnectionType {
CONNECTION_UNKNOWN,
CONNECTION_ETHERNET,
CONNECTION_WIFI,
CONNECTION_5G,
CONNECTION_4G,
CONNECTION_3G,
CONNECTION_2G,
CONNECTION_UNKNOWN_CELLULAR,
CONNECTION_BLUETOOTH,
CONNECTION_VPN,
CONNECTION_NONE
}
public static class IPAddress {
public final byte[] address;
public IPAddress(byte[] address) {
this.address = address;
}
@CalledByNative("IPAddress")
private byte[] getAddress() {
return address;
}
}
/** Java version of NetworkMonitor.NetworkInformation */
public static class NetworkInformation {
public final String name;
public final ConnectionType type;
// Used to specify the underlying network type if the type is CONNECTION_VPN.
public final ConnectionType underlyingTypeForVpn;
public final long handle;
public final IPAddress[] ipAddresses;
public NetworkInformation(String name, ConnectionType type, ConnectionType underlyingTypeForVpn,
long handle, IPAddress[] addresses) {
this.name = name;
this.type = type;
this.underlyingTypeForVpn = underlyingTypeForVpn;
this.handle = handle;
this.ipAddresses = addresses;
}
@CalledByNative("NetworkInformation")
private IPAddress[] getIpAddresses() {
return ipAddresses;
}
@CalledByNative("NetworkInformation")
private ConnectionType getConnectionType() {
return type;
}
@CalledByNative("NetworkInformation")
private ConnectionType getUnderlyingConnectionTypeForVpn() {
return underlyingTypeForVpn;
}
@CalledByNative("NetworkInformation")
private long getHandle() {
return handle;
}
@CalledByNative("NetworkInformation")
private String getName() {
return name;
}
};
public class NetworkMonitorAutoDetect extends BroadcastReceiver implements NetworkChangeDetector {
static class NetworkState {
private final boolean connected;
// Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
@ -157,16 +92,26 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
return underlyingNetworkSubtypeForVpn;
}
}
/**
* The methods in this class get called when the network changes if the callback
* is registered with a proper network request. It is only available in Android Lollipop
* and above.
*/
@SuppressLint("NewApi")
private class SimpleNetworkCallback extends NetworkCallback {
@VisibleForTesting()
class SimpleNetworkCallback extends NetworkCallback {
@GuardedBy("availableNetworks") final Set<Network> availableNetworks;
SimpleNetworkCallback(Set<Network> availableNetworks) {
this.availableNetworks = availableNetworks;
}
@Override
public void onAvailable(Network network) {
Logging.d(TAG, "Network becomes available: " + network.toString());
Logging.d(TAG,
"Network"
+ " handle: " + networkToNetId(network)
+ " becomes available: " + network.toString());
synchronized (availableNetworks) {
availableNetworks.add(network);
}
onNetworkChanged(network);
}
@ -174,7 +119,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) {
// A capabilities change may indicate the ConnectionType has changed,
// so forward the new NetworkInformation along to the observer.
Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
Logging.d(TAG,
"handle: " + networkToNetId(network)
+ " capabilities changed: " + networkCapabilities.toString());
onNetworkChanged(network);
}
@ -182,7 +129,10 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) {
// A link property change may indicate the IP address changes.
// so forward the new NetworkInformation to the observer.
Logging.d(TAG, "link properties changed: " + linkProperties.toString());
//
// linkProperties.toString() has PII that cannot be redacted
// very reliably, so do not include in log.
Logging.d(TAG, "handle: " + networkToNetId(network) + " link properties changed");
onNetworkChanged(network);
}
@ -190,13 +140,22 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
public void onLosing(Network network, int maxMsToLive) {
// Signals that the network is about to be lost in maxMsToLive milliseconds.
// We may use this signal later.
Logging.d(
TAG, "Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
Logging.d(TAG,
"Network"
+ " handle: " + networkToNetId(network) + ", " + network.toString()
+ " is about to lose in " + maxMsToLive + "ms");
}
@Override
public void onLost(Network network) {
Logging.d(TAG, "Network " + network.toString() + " is disconnected");
Logging.d(TAG,
"Network"
+ " handle: " + networkToNetId(network) + ", " + network.toString()
+ " is disconnected");
synchronized (availableNetworks) {
availableNetworks.remove(network);
}
observer.onNetworkDisconnect(networkToNetId(network));
}
@ -216,15 +175,43 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/
@Nullable private final ConnectivityManager connectivityManager;
ConnectivityManagerDelegate(Context context) {
connectivityManager =
(ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
/**
* Note: The availableNetworks set is instantiated in NetworkMonitorAutoDetect
* and the instance is mutated by SimpleNetworkCallback.
*/
@NonNull @GuardedBy("availableNetworks") private final Set<Network> availableNetworks;
/** field trials */
private final boolean getAllNetworksFromCache;
private final boolean requestVPN;
private final boolean includeOtherUidNetworks;
ConnectivityManagerDelegate(
Context context, Set<Network> availableNetworks, String fieldTrialsString) {
this((ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE),
availableNetworks, fieldTrialsString);
}
// For testing.
ConnectivityManagerDelegate() {
// All the methods below should be overridden.
connectivityManager = null;
@VisibleForTesting
ConnectivityManagerDelegate(ConnectivityManager connectivityManager,
Set<Network> availableNetworks, String fieldTrialsString) {
this.connectivityManager = connectivityManager;
this.availableNetworks = availableNetworks;
this.getAllNetworksFromCache =
checkFieldTrial(fieldTrialsString, "getAllNetworksFromCache", false);
this.requestVPN = checkFieldTrial(fieldTrialsString, "requestVPN", false);
this.includeOtherUidNetworks =
checkFieldTrial(fieldTrialsString, "includeOtherUidNetworks", false);
}
private static boolean checkFieldTrial(
String fieldTrialsString, String key, boolean defaultValue) {
if (fieldTrialsString.contains(key + ":true")) {
return true;
} else if (fieldTrialsString.contains(key + ":false")) {
return false;
}
return defaultValue;
}
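
The parsing is a plain substring match on "key:true" / "key:false", so values behave as illustrated below; the trial string itself is hypothetical:

String trials = "getAllNetworksFromCache:true,requestVPN:false";
checkFieldTrial(trials, "getAllNetworksFromCache", false); // -> true
checkFieldTrial(trials, "requestVPN", true);               // -> false
checkFieldTrial(trials, "includeOtherUidNetworks", false); // -> false (default used)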
/**
@ -239,7 +226,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
/**
* Returns connection type and status information about |network|.
* Returns connection type and status information about `network`.
* Only callable on Lollipop and newer releases.
*/
@SuppressLint("NewApi")
@ -253,9 +240,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
return new NetworkState(false, -1, -1, -1, -1);
}
// The general logic of handling a VPN in this method is as follows. getNetworkInfo will
// return the info of the network with the same id as in |network| when it is registered via
// ConnectivityManager.registerNetworkAgent in Android. |networkInfo| may or may not indicate
// the type TYPE_VPN if |network| is a VPN. To reliably detect the VPN interface, we need to
// return the info of the network with the same id as in `network` when it is registered via
// ConnectivityManager.registerNetworkAgent in Android. `networkInfo` may or may not indicate
// the type TYPE_VPN if `network` is a VPN. To reliably detect the VPN interface, we need to
// query the network capability as below in the case when networkInfo.getType() is not
// TYPE_VPN. On the other hand when networkInfo.getType() is TYPE_VPN, the only solution so
// far to obtain the underlying network information is to query the active network interface.
@ -265,7 +252,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
// getActiveNetworkInfo may thus give the wrong interface information, and one should note
// that getActiveNetworkInfo would return the default network interface if the VPN does not
// specify its underlying networks in the implementation. Therefore, we need further compare
// |network| to the active network. If they are not the same network, we will have to fall
// `network` to the active network. If they are not the same network, we will have to fall
// back to report an unknown network.
if (networkInfo.getType() != ConnectivityManager.TYPE_VPN) {
@ -276,15 +263,15 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|| !networkCapabilities.hasTransport(NetworkCapabilities.TRANSPORT_VPN)) {
return getNetworkState(networkInfo);
}
// When |network| is in fact a VPN after querying its capability but |networkInfo| is not of
// type TYPE_VPN, |networkInfo| contains the info for the underlying network, and we return
// When `network` is in fact a VPN after querying its capability but `networkInfo` is not of
// type TYPE_VPN, `networkInfo` contains the info for the underlying network, and we return
// a NetworkState constructed from it.
return new NetworkState(networkInfo.isConnected(), ConnectivityManager.TYPE_VPN, -1,
networkInfo.getType(), networkInfo.getSubtype());
}
// When |networkInfo| is of type TYPE_VPN, which implies |network| is a VPN, we return the
// NetworkState of the active network via getActiveNetworkInfo(), if |network| is the active
// When `networkInfo` is of type TYPE_VPN, which implies `network` is a VPN, we return the
// NetworkState of the active network via getActiveNetworkInfo(), if `network` is the active
// network that supports the VPN. Otherwise, NetworkState of an unknown network with type -1
// will be returned.
//
@ -332,6 +319,13 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
if (connectivityManager == null) {
return new Network[0];
}
if (supportNetworkCallback() && getAllNetworksFromCache) {
synchronized (availableNetworks) {
return availableNetworks.toArray(new Network[0]);
}
}
return connectivityManager.getAllNetworks();
}
@ -410,8 +404,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
NetworkState networkState = getNetworkState(network);
ConnectionType connectionType = getConnectionType(networkState);
if (connectionType == ConnectionType.CONNECTION_NONE) {
NetworkChangeDetector.ConnectionType connectionType = getConnectionType(networkState);
if (connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_NONE) {
// This may not be an error. The OS may signal a network event with connection type
// NONE when the network disconnects.
Logging.d(TAG, "Network " + network.toString() + " is disconnected");
@ -420,13 +414,14 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
// Some android device may return a CONNECTION_UNKNOWN_CELLULAR or CONNECTION_UNKNOWN type,
// which appears to be usable. Just log them here.
if (connectionType == ConnectionType.CONNECTION_UNKNOWN
|| connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
if (connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN
|| connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
+ " because it has type " + networkState.getNetworkType() + " and subtype "
+ networkState.getNetworkSubType());
}
// ConnectionType.CONNECTION_UNKNOWN if the network is not a VPN or the underlying network is
// NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN if the network is not a VPN or the
// underlying network is unknown.
ConnectionType underlyingConnectionTypeForVpn =
getUnderlyingConnectionTypeForVpn(networkState);
@ -451,14 +446,26 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
&& capabilities.hasCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET);
}
@SuppressLint("NewApi")
@VisibleForTesting()
NetworkRequest createNetworkRequest() {
// Requests the following capabilities by default: NOT_VPN, NOT_RESTRICTED, TRUSTED
NetworkRequest.Builder builder =
new NetworkRequest.Builder().addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET);
if (requestVPN) {
builder.removeCapability(NetworkCapabilities.NET_CAPABILITY_NOT_VPN);
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && includeOtherUidNetworks) {
builder.setIncludeOtherUidNetworks(true);
}
return builder.build();
}
/** Only callable on Lollipop and newer releases. */
@SuppressLint("NewApi")
public void registerNetworkCallback(NetworkCallback networkCallback) {
connectivityManager.registerNetworkCallback(
new NetworkRequest.Builder()
.addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
.build(),
networkCallback);
connectivityManager.registerNetworkCallback(createNetworkRequest(), networkCallback);
}
/** Only callable on Lollipop and newer releases. */
@ -490,7 +497,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
public boolean supportNetworkCallback() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && connectivityManager != null;
return connectivityManager != null;
}
}
@ -529,12 +536,12 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
// (NETWORK_UNSPECIFIED) for these addresses.
private static final int WIFI_P2P_NETWORK_HANDLE = 0;
private final Context context;
private final Observer observer;
private final NetworkChangeDetector.Observer observer;
// Network information about a WifiP2p (aka WiFi-Direct) network, or null if no such network is
// connected.
@Nullable private NetworkInformation wifiP2pNetworkInfo;
WifiDirectManagerDelegate(Observer observer, Context context) {
WifiDirectManagerDelegate(NetworkChangeDetector.Observer observer, Context context) {
this.context = context;
this.observer = observer;
IntentFilter intentFilter = new IntentFilter();
@ -599,9 +606,10 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
ipAddresses[i] = new IPAddress(interfaceAddresses.get(i).getAddress());
}
wifiP2pNetworkInfo =
new NetworkInformation(wifiP2pGroup.getInterface(), ConnectionType.CONNECTION_WIFI,
ConnectionType.CONNECTION_NONE, WIFI_P2P_NETWORK_HANDLE, ipAddresses);
wifiP2pNetworkInfo = new NetworkInformation(wifiP2pGroup.getInterface(),
NetworkChangeDetector.ConnectionType.CONNECTION_WIFI,
NetworkChangeDetector.ConnectionType.CONNECTION_NONE, WIFI_P2P_NETWORK_HANDLE,
ipAddresses);
observer.onNetworkConnect(wifiP2pNetworkInfo);
}
@ -614,11 +622,11 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
}
static final long INVALID_NET_ID = -1;
private static final long INVALID_NET_ID = -1;
private static final String TAG = "NetworkMonitorAutoDetect";
// Observer for the connection type change.
private final Observer observer;
private final NetworkChangeDetector.Observer observer;
private final IntentFilter intentFilter;
private final Context context;
// Used to request mobile network. It does not do anything except for keeping
@ -630,31 +638,22 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
private ConnectivityManagerDelegate connectivityManagerDelegate;
private WifiManagerDelegate wifiManagerDelegate;
private WifiDirectManagerDelegate wifiDirectManagerDelegate;
private static boolean includeWifiDirect;
@GuardedBy("availableNetworks") final Set<Network> availableNetworks = new HashSet<>();
private boolean isRegistered;
private ConnectionType connectionType;
private NetworkChangeDetector.ConnectionType connectionType;
private String wifiSSID;
/**
* Observer interface by which observer is notified of network changes.
*/
public static interface Observer {
/**
* Called when default network changes.
*/
public void onConnectionTypeChanged(ConnectionType newConnectionType);
public void onNetworkConnect(NetworkInformation networkInfo);
public void onNetworkDisconnect(long networkHandle);
}
/**
* Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread.
*/
/** Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread. */
@SuppressLint("NewApi")
public NetworkMonitorAutoDetect(Observer observer, Context context) {
public NetworkMonitorAutoDetect(NetworkChangeDetector.Observer observer, Context context) {
this.observer = observer;
this.context = context;
connectivityManagerDelegate = new ConnectivityManagerDelegate(context);
String fieldTrialsString = observer.getFieldTrialsString();
connectivityManagerDelegate =
new ConnectivityManagerDelegate(context, availableNetworks, fieldTrialsString);
wifiManagerDelegate = new WifiManagerDelegate(context);
final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
@ -662,7 +661,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
wifiSSID = getWifiSSID(networkState);
intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
if (PeerConnectionFactory.fieldTrialsFindFullName("IncludeWifiDirect").equals("Enabled")) {
if (includeWifiDirect) {
wifiDirectManagerDelegate = new WifiDirectManagerDelegate(observer, context);
}
@ -678,7 +677,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
tempNetworkCallback = null;
}
mobileNetworkCallback = tempNetworkCallback;
allNetworkCallback = new SimpleNetworkCallback();
allNetworkCallback = new SimpleNetworkCallback(availableNetworks);
connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback);
} else {
mobileNetworkCallback = null;
@ -686,6 +685,12 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
}
/** Enables WifiDirectManager. */
public static void setIncludeWifiDirect(boolean enable) {
includeWifiDirect = enable;
}
@Override
public boolean supportNetworkCallback() {
return connectivityManagerDelegate.supportNetworkCallback();
}
@ -712,8 +717,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
return isRegistered;
}
@Override
@Nullable
List<NetworkInformation> getActiveNetworkList() {
public List<NetworkInformation> getActiveNetworkList() {
List<NetworkInformation> connectivityManagerList =
connectivityManagerDelegate.getActiveNetworkList();
if (connectivityManagerList == null) {
@ -727,6 +733,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
return result;
}
@Override
public void destroy() {
if (allNetworkCallback != null) {
connectivityManagerDelegate.releaseCallback(allNetworkCallback);
@ -776,22 +783,24 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
return connectivityManagerDelegate.getDefaultNetId();
}
private static ConnectionType getConnectionType(
private static NetworkChangeDetector.ConnectionType getConnectionType(
boolean isConnected, int networkType, int networkSubtype) {
if (!isConnected) {
return ConnectionType.CONNECTION_NONE;
return NetworkChangeDetector.ConnectionType.CONNECTION_NONE;
}
switch (networkType) {
case ConnectivityManager.TYPE_ETHERNET:
return ConnectionType.CONNECTION_ETHERNET;
return NetworkChangeDetector.ConnectionType.CONNECTION_ETHERNET;
case ConnectivityManager.TYPE_WIFI:
return ConnectionType.CONNECTION_WIFI;
return NetworkChangeDetector.ConnectionType.CONNECTION_WIFI;
case ConnectivityManager.TYPE_WIMAX:
return ConnectionType.CONNECTION_4G;
return NetworkChangeDetector.ConnectionType.CONNECTION_4G;
case ConnectivityManager.TYPE_BLUETOOTH:
return ConnectionType.CONNECTION_BLUETOOTH;
return NetworkChangeDetector.ConnectionType.CONNECTION_BLUETOOTH;
case ConnectivityManager.TYPE_MOBILE:
case ConnectivityManager.TYPE_MOBILE_DUN:
case ConnectivityManager.TYPE_MOBILE_HIPRI:
// Use information from TelephonyManager to classify the connection.
switch (networkSubtype) {
case TelephonyManager.NETWORK_TYPE_GPRS:
@ -800,7 +809,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
case TelephonyManager.NETWORK_TYPE_1xRTT:
case TelephonyManager.NETWORK_TYPE_IDEN:
case TelephonyManager.NETWORK_TYPE_GSM:
return ConnectionType.CONNECTION_2G;
return NetworkChangeDetector.ConnectionType.CONNECTION_2G;
case TelephonyManager.NETWORK_TYPE_UMTS:
case TelephonyManager.NETWORK_TYPE_EVDO_0:
case TelephonyManager.NETWORK_TYPE_EVDO_A:
@ -811,30 +820,36 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
case TelephonyManager.NETWORK_TYPE_EHRPD:
case TelephonyManager.NETWORK_TYPE_HSPAP:
case TelephonyManager.NETWORK_TYPE_TD_SCDMA:
return ConnectionType.CONNECTION_3G;
return NetworkChangeDetector.ConnectionType.CONNECTION_3G;
case TelephonyManager.NETWORK_TYPE_LTE:
case TelephonyManager.NETWORK_TYPE_IWLAN:
return ConnectionType.CONNECTION_4G;
return NetworkChangeDetector.ConnectionType.CONNECTION_4G;
case TelephonyManager.NETWORK_TYPE_NR:
return ConnectionType.CONNECTION_5G;
return NetworkChangeDetector.ConnectionType.CONNECTION_5G;
default:
return ConnectionType.CONNECTION_UNKNOWN_CELLULAR;
return NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN_CELLULAR;
}
case ConnectivityManager.TYPE_VPN:
return ConnectionType.CONNECTION_VPN;
return NetworkChangeDetector.ConnectionType.CONNECTION_VPN;
default:
return ConnectionType.CONNECTION_UNKNOWN;
return NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN;
}
}
public static ConnectionType getConnectionType(NetworkState networkState) {
public static NetworkChangeDetector.ConnectionType getConnectionType(NetworkState networkState) {
return getConnectionType(networkState.isConnected(), networkState.getNetworkType(),
networkState.getNetworkSubType());
}
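A short sketch of the mapping, reusing the NetworkState constructor seen earlier in this
file (the trailing -1 arguments stand for unknown subtype and VPN underlying types, an
assumption based on the `new NetworkState(false, -1, -1, -1, -1)` call above):
  NetworkState wifi = new NetworkState(
      /* connected= */ true, ConnectivityManager.TYPE_WIFI, -1, -1, -1);
  // -> NetworkChangeDetector.ConnectionType.CONNECTION_WIFI
  NetworkChangeDetector.ConnectionType type = getConnectionType(wifi);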
private static ConnectionType getUnderlyingConnectionTypeForVpn(NetworkState networkState) {
@Override
public NetworkChangeDetector.ConnectionType getCurrentConnectionType() {
return getConnectionType(getCurrentNetworkState());
}
private static NetworkChangeDetector.ConnectionType getUnderlyingConnectionTypeForVpn(
NetworkState networkState) {
if (networkState.getNetworkType() != ConnectivityManager.TYPE_VPN) {
return ConnectionType.CONNECTION_NONE;
return NetworkChangeDetector.ConnectionType.CONNECTION_NONE;
}
return getConnectionType(networkState.isConnected(),
networkState.getUnderlyingNetworkTypeForVpn(),
@ -842,7 +857,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
private String getWifiSSID(NetworkState networkState) {
if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI)
if (getConnectionType(networkState) != NetworkChangeDetector.ConnectionType.CONNECTION_WIFI)
return "";
return wifiManagerDelegate.getWifiSSID();
}
@ -857,7 +872,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
}
private void connectionTypeChanged(NetworkState networkState) {
ConnectionType newConnectionType = getConnectionType(networkState);
NetworkChangeDetector.ConnectionType newConnectionType = getConnectionType(networkState);
String newWifiSSID = getWifiSSID(networkState);
if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID))
return;

View File

@ -1,3 +1,3 @@
per-file Camera*=sakal@webrtc.org
per-file Histogram.java=sakal@webrtc.org
per-file Metrics.java=sakal@webrtc.org
per-file Camera*=xalep@webrtc.org
per-file Histogram.java=xalep@webrtc.org
per-file Metrics.java=xalep@webrtc.org

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -118,6 +118,9 @@ public class PeerConnection {
/** Triggered when a new ICE candidate has been found. */
@CalledByNative("Observer") void onIceCandidate(IceCandidate candidate);
/** Triggered when gathering of an ICE candidate failed. */
default @CalledByNative("Observer") void onIceCandidateError(IceCandidateErrorEvent event) {}
/** Triggered when some ICE candidates have been removed. */
@CalledByNative("Observer") void onIceCandidatesRemoved(IceCandidate[] candidates);
@ -141,7 +144,14 @@ public class PeerConnection {
* Triggered when a new track is signaled by the remote peer, as a result of
* setRemoteDescription.
*/
@CalledByNative("Observer") void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams);
@CalledByNative("Observer")
default void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams){};
/**
* Triggered when a previously added remote track is removed by the remote
* peer, as a result of setRemoteDescription.
*/
@CalledByNative("Observer") default void onRemoveTrack(RtpReceiver receiver){};
/**
* Triggered when the signaling from SetRemoteDescription indicates that a transceiver
@ -162,9 +172,9 @@ public class PeerConnection {
public final String password;
public final TlsCertPolicy tlsCertPolicy;
// If the URIs in |urls| only contain IP addresses, this field can be used
// If the URIs in `urls` only contain IP addresses, this field can be used
// to indicate the hostname, which may be necessary for TLS (using the SNI
// extension). If |urls| itself contains the hostname, this isn't
// extension). If `urls` itself contains the hostname, this isn't
// necessary.
public final String hostname;
@ -422,29 +432,29 @@ public class PeerConnection {
/**
* Java version of webrtc::SdpSemantics.
*
* Configure the SDP semantics used by this PeerConnection. Note that the
* WebRTC 1.0 specification requires UNIFIED_PLAN semantics. The
* RtpTransceiver API is only available with UNIFIED_PLAN semantics.
* Configure the SDP semantics used by this PeerConnection. By default, this
* is UNIFIED_PLAN, which is compliant with the WebRTC 1.0 specification. It is
* possible to overwrite this with the deprecated PLAN_B SDP format, but note
* that PLAN_B will be deleted at some future date, see
* https://crbug.com/webrtc/13528.
*
* <p>PLAN_B will cause PeerConnection to create offers and answers with at
* most one audio and one video m= section with multiple RtpSenders and
* RtpReceivers specified as multiple a=ssrc lines within the section. This
* will also cause PeerConnection to ignore all but the first m= section of
* the same media type.
*
* <p>UNIFIED_PLAN will cause PeerConnection to create offers and answers with
* UNIFIED_PLAN will cause PeerConnection to create offers and answers with
* multiple m= sections where each m= section maps to one RtpSender and one
* RtpReceiver (an RtpTransceiver), either both audio or both video. This
* will also cause PeerConnection to ignore all but the first a=ssrc lines
* that form a Plan B stream.
*
* <p>For users who wish to send multiple audio/video streams and need to stay
* interoperable with legacy WebRTC implementations, specify PLAN_B.
*
* <p>For users who wish to send multiple audio/video streams and/or wish to
* use the new RtpTransceiver API, specify UNIFIED_PLAN.
* PLAN_B will cause PeerConnection to create offers and answers with at most
* one audio and one video m= section with multiple RtpSenders and
* RtpReceivers specified as multiple a=ssrc lines within the section. This
* will also cause PeerConnection to ignore all but the first m= section of
* the same media type.
*/
public enum SdpSemantics { PLAN_B, UNIFIED_PLAN }
public enum SdpSemantics {
// TODO(https://crbug.com/webrtc/13528): Remove support for PLAN_B.
@Deprecated PLAN_B,
UNIFIED_PLAN
}
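A usage sketch, assuming an existing `iceServers` list; UNIFIED_PLAN is already the
default (see RTCConfiguration below), so setting it explicitly is only illustrative:
  PeerConnection.RTCConfiguration config =
      new PeerConnection.RTCConfiguration(iceServers);
  config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
  // Opting into the deprecated PLAN_B is scheduled for removal,
  // see https://crbug.com/webrtc/13528.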
/** Java version of PeerConnectionInterface.RTCConfiguration */
// TODO(qingsi): Resolve the naming inconsistency of fields with/without units.
@ -501,6 +511,9 @@ public class PeerConnection {
// to keep NAT bindings open.
// The default value in the implementation is used if this field is null.
@Nullable public Integer stunCandidateKeepaliveIntervalMs;
// The interval in milliseconds of pings sent when the connection is stable and writable.
// The default value in the implementation is used if this field is null.
@Nullable public Integer stableWritableConnectionPingIntervalMs;
public boolean disableIPv6OnWifi;
// By default, PeerConnection will use a limited number of IPv6 network
// interfaces, in order to avoid too many ICE candidate pairs being created
@ -511,14 +524,11 @@ public class PeerConnection {
// These values will be overridden by MediaStream constraints if deprecated constraints-based
// create peerconnection interface is used.
public boolean disableIpv6;
public boolean enableDscp;
public boolean enableCpuOveruseDetection;
public boolean enableRtpDataChannel;
public boolean suspendBelowMinBitrate;
@Nullable public Integer screencastMinBitrate;
@Nullable public Boolean combinedAudioVideoBwe;
@Nullable public Boolean enableDtlsSrtp;
// Use "Unknown" to represent no preference of adapter types, not the
// preference of adapters of unknown types.
public AdapterType networkPreference;
@ -550,6 +560,19 @@ public class PeerConnection {
*/
@Nullable public String turnLoggingId;
/**
* Allow implicit rollback of local description when remote description
* conflicts with local description.
* See: https://w3c.github.io/webrtc-pc/#dom-peerconnection-setremotedescription
*/
public boolean enableImplicitRollback;
/**
* Controls whether "a=extmap-allow-mixed" is included in the offer.
* See: https://www.chromestatus.com/feature/6269234631933952
*/
public boolean offerExtmapAllowMixed;
// TODO(deadbeef): Instead of duplicating the defaults here, we should do
// something to pick up the defaults from C++. The Objective-C equivalent
// of RTCConfiguration does that.
@ -577,22 +600,22 @@ public class PeerConnection {
iceUnwritableTimeMs = null;
iceUnwritableMinChecks = null;
stunCandidateKeepaliveIntervalMs = null;
stableWritableConnectionPingIntervalMs = null;
disableIPv6OnWifi = false;
maxIPv6Networks = 5;
disableIpv6 = false;
enableDscp = false;
enableCpuOveruseDetection = true;
enableRtpDataChannel = false;
suspendBelowMinBitrate = false;
screencastMinBitrate = null;
combinedAudioVideoBwe = null;
enableDtlsSrtp = null;
networkPreference = AdapterType.UNKNOWN;
sdpSemantics = SdpSemantics.PLAN_B;
sdpSemantics = SdpSemantics.UNIFIED_PLAN;
activeResetSrtpParams = false;
cryptoOptions = null;
turnLoggingId = null;
allowCodecSwitching = null;
enableImplicitRollback = false;
offerExtmapAllowMixed = true;
}
@CalledByNative("RTCConfiguration")
@ -722,6 +745,12 @@ public class PeerConnection {
return stunCandidateKeepaliveIntervalMs;
}
@Nullable
@CalledByNative("RTCConfiguration")
Integer getStableWritableConnectionPingIntervalMs() {
return stableWritableConnectionPingIntervalMs;
}
@CalledByNative("RTCConfiguration")
boolean getDisableIPv6OnWifi() {
return disableIPv6OnWifi;
@ -738,11 +767,6 @@ public class PeerConnection {
return turnCustomizer;
}
@CalledByNative("RTCConfiguration")
boolean getDisableIpv6() {
return disableIpv6;
}
@CalledByNative("RTCConfiguration")
boolean getEnableDscp() {
return enableDscp;
@ -753,11 +777,6 @@ public class PeerConnection {
return enableCpuOveruseDetection;
}
@CalledByNative("RTCConfiguration")
boolean getEnableRtpDataChannel() {
return enableRtpDataChannel;
}
@CalledByNative("RTCConfiguration")
boolean getSuspendBelowMinBitrate() {
return suspendBelowMinBitrate;
@ -775,12 +794,6 @@ public class PeerConnection {
return combinedAudioVideoBwe;
}
@Nullable
@CalledByNative("RTCConfiguration")
Boolean getEnableDtlsSrtp() {
return enableDtlsSrtp;
}
@CalledByNative("RTCConfiguration")
AdapterType getNetworkPreference() {
return networkPreference;
@ -813,6 +826,16 @@ public class PeerConnection {
String getTurnLoggingId() {
return turnLoggingId;
}
@CalledByNative("RTCConfiguration")
boolean getEnableImplicitRollback() {
return enableImplicitRollback;
}
@CalledByNative("RTCConfiguration")
boolean getOfferExtmapAllowMixed() {
return offerExtmapAllowMixed;
}
};
private final List<MediaStream> localStreams = new ArrayList<>();
@ -858,6 +881,10 @@ public class PeerConnection {
nativeCreateAnswer(observer, constraints);
}
public void setLocalDescription(SdpObserver observer) {
nativeSetLocalDescriptionAutomatically(observer);
}
public void setLocalDescription(SdpObserver observer, SessionDescription sdp) {
nativeSetLocalDescription(observer, sdp);
}
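A minimal sketch of the two variants, assuming a PeerConnection `pc`, an SdpObserver
`observer`, and a SessionDescription `sdp` produced by createOffer/createAnswer:
  // Implicit variant: the PeerConnection creates and applies an offer or answer
  // itself, depending on the current signaling state.
  pc.setLocalDescription(observer);
  // Explicit variant: applies a previously created description.
  pc.setLocalDescription(observer, sdp);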
@ -866,6 +893,13 @@ public class PeerConnection {
nativeSetRemoteDescription(observer, sdp);
}
/**
* Tells the PeerConnection that ICE should be restarted.
*/
public void restartIce() {
nativeRestartIce();
}
/**
* Enables/disables playout of received audio streams. Enabled by default.
*
@ -896,6 +930,11 @@ public class PeerConnection {
return nativeAddIceCandidate(candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
}
public void addIceCandidate(IceCandidate candidate, AddIceObserver observer) {
nativeAddIceCandidateWithObserver(
candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp, observer);
}
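A usage sketch of the observer-based variant, assuming a PeerConnection `pc`, an
IceCandidate `candidate` from the remote side, and a logging tag `TAG`:
  pc.addIceCandidate(candidate, new AddIceObserver() {
    @Override
    public void onAddSuccess() {
      Logging.d(TAG, "Candidate added");
    }
    @Override
    public void onAddFailure(String error) {
      Logging.d(TAG, "Candidate add failed: " + error);
    }
  });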
public boolean removeIceCandidates(final IceCandidate[] candidates) {
return nativeRemoveIceCandidates(candidates);
}
@ -1055,7 +1094,7 @@ public class PeerConnection {
* transceiver will cause future calls to CreateOffer to add a media description
* for the corresponding transceiver.
*
* <p>The initial value of |mid| in the returned transceiver is null. Setting a
* <p>The initial value of `mid` in the returned transceiver is null. Setting a
* new session description may change it to a non-null value.
*
* <p>https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver
@ -1130,6 +1169,22 @@ public class PeerConnection {
nativeNewGetStats(callback);
}
/**
* Gets stats using the new stats collection API, see webrtc/api/stats/. These
* will replace the old stats collection API when the new API has matured enough.
*/
public void getStats(RtpSender sender, RTCStatsCollectorCallback callback) {
nativeNewGetStatsSender(sender.getNativeRtpSender(), callback);
}
/**
* Gets stats using the new stats collection API, see webrtc/api/stats/. These
* will replace the old stats collection API when the new API has matured enough.
*/
public void getStats(RtpReceiver receiver, RTCStatsCollectorCallback callback) {
nativeNewGetStatsReceiver(receiver.getNativeRtpReceiver(), callback);
}
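A short sketch of the filtered variants, assuming a PeerConnection `pc`, an RtpSender
`sender`, an RtpReceiver `receiver`, and a logging tag `TAG` (RTCStatsCollectorCallback
has a single onStatsDelivered method, so a lambda works):
  pc.getStats(sender, report -> Logging.d(TAG, "Sender stats: " + report));
  pc.getStats(receiver, report -> Logging.d(TAG, "Receiver stats: " + report));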
/**
* Limits the bandwidth allocated for all RTP streams sent by this
* PeerConnection. Pass null to leave a value unchanged.
@ -1240,8 +1295,10 @@ public class PeerConnection {
private native DataChannel nativeCreateDataChannel(String label, DataChannel.Init init);
private native void nativeCreateOffer(SdpObserver observer, MediaConstraints constraints);
private native void nativeCreateAnswer(SdpObserver observer, MediaConstraints constraints);
private native void nativeSetLocalDescriptionAutomatically(SdpObserver observer);
private native void nativeSetLocalDescription(SdpObserver observer, SessionDescription sdp);
private native void nativeSetRemoteDescription(SdpObserver observer, SessionDescription sdp);
private native void nativeRestartIce();
private native void nativeSetAudioPlayout(boolean playout);
private native void nativeSetAudioRecording(boolean recording);
private native boolean nativeSetBitrate(Integer min, Integer current, Integer max);
@ -1255,11 +1312,15 @@ public class PeerConnection {
private native boolean nativeSetConfiguration(RTCConfiguration config);
private native boolean nativeAddIceCandidate(
String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
private native void nativeAddIceCandidateWithObserver(
String sdpMid, int sdpMLineIndex, String iceCandidateSdp, AddIceObserver observer);
private native boolean nativeRemoveIceCandidates(final IceCandidate[] candidates);
private native boolean nativeAddLocalStream(long stream);
private native void nativeRemoveLocalStream(long stream);
private native boolean nativeOldGetStats(StatsObserver observer, long nativeTrack);
private native void nativeNewGetStats(RTCStatsCollectorCallback callback);
private native void nativeNewGetStatsSender(long sender, RTCStatsCollectorCallback callback);
private native void nativeNewGetStatsReceiver(long receiver, RTCStatsCollectorCallback callback);
private native RtpSender nativeCreateSender(String kind, String stream_id);
private native List<RtpSender> nativeGetSenders();
private native List<RtpReceiver> nativeGetReceivers();

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
/**
* PeerConnectionDependencies holds all PeerConnection dependencies that are

View File

@ -12,7 +12,7 @@ package org.webrtc;
import android.content.Context;
import android.os.Process;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.List;
import org.webrtc.Logging.Severity;
import org.webrtc.PeerConnection;
@ -133,7 +133,7 @@ public class PeerConnectionFactory {
public static class Options {
// Keep in sync with webrtc/rtc_base/network.h!
//
// These bit fields are defined for |networkIgnoreMask| below.
// These bit fields are defined for `networkIgnoreMask` below.
static final int ADAPTER_TYPE_UNKNOWN = 0;
static final int ADAPTER_TYPE_ETHERNET = 1 << 0;
static final int ADAPTER_TYPE_WIFI = 1 << 1;
@ -404,6 +404,7 @@ public class PeerConnectionFactory {
public PeerConnection createPeerConnection(List<PeerConnection.IceServer> iceServers,
MediaConstraints constraints, PeerConnection.Observer observer) {
PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
return createPeerConnection(rtcConfig, constraints, observer);
}
@ -411,6 +412,7 @@ public class PeerConnectionFactory {
public PeerConnection createPeerConnection(
List<PeerConnection.IceServer> iceServers, PeerConnection.Observer observer) {
PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
return createPeerConnection(rtcConfig, observer);
}
@ -547,15 +549,12 @@ public class PeerConnectionFactory {
/**
* Print the Java stack traces for the critical threads used by PeerConnectionFactory, namely:
* signaling thread, worker thread, and network thread. If printNativeStackTraces is true, also
* attempt to print the C++ stack traces for these (and some other) threads.
* attempt to print the C++ stack traces for these threads.
*/
public void printInternalStackTraces(boolean printNativeStackTraces) {
printStackTrace(signalingThread, printNativeStackTraces);
printStackTrace(workerThread, printNativeStackTraces);
printStackTrace(networkThread, printNativeStackTraces);
if (printNativeStackTraces) {
nativePrintStackTracesOfRegisteredThreads();
}
}
@CalledByNative
@ -616,5 +615,4 @@ public class PeerConnectionFactory {
private static native void nativeInjectLoggable(JNILogging jniLogging, int severity);
private static native void nativeDeleteLoggable();
private static native void nativePrintStackTrace(int tid);
private static native void nativePrintStackTracesOfRegisteredThreads();
}

View File

@ -11,7 +11,7 @@
package org.webrtc;
import android.media.MediaCodecInfo;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.Arrays;
/** Factory for Android platform software VideoDecoders. */

View File

@ -14,8 +14,8 @@ import java.util.Map;
/**
* Java version of webrtc::RTCStats. Represents an RTCStats object, as
* described in https://w3c.github.io/webrtc-stats/. The |id|, |timestampUs|
* and |type| accessors have the same meaning for this class as for the
* described in https://w3c.github.io/webrtc-stats/. The `id`, `timestampUs`
* and `type` accessors have the same meaning for this class as for the
* RTCStats dictionary. Each RTCStatsReport produced by getStats contains
* multiple RTCStats objects; one for each underlying object (codec, stream,
* transport, etc.) that was inspected to produce the stats.
@ -62,6 +62,7 @@ public class RTCStats {
* - Double
* - String
* - The array form of any of the above (e.g., Integer[])
* - Map of String keys to BigInteger / Double values
*/
public Map<String, Object> getMembers() {
return members;

View File

@ -123,9 +123,9 @@ public class RendererCommon {
// clipped.
// SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
// possible of the view while maintaining aspect ratio, under the constraint that at least
// |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
// `BALANCED_VISIBLE_FRACTION` of the frame content will be shown.
public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
// The minimum fraction of the frame content that will be shown for `SCALE_ASPECT_BALANCED`.
// This limits excessive cropping when adjusting display size.
private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
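A sketch of how the balanced mode is typically consumed, via the getDisplaySize() helper
in this class (argument names are illustrative):
  // With SCALE_ASPECT_BALANCED, at least 56.25% (0.5625) of the frame content
  // stays visible when the view and frame aspect ratios differ.
  Point size = RendererCommon.getDisplaySize(
      RendererCommon.ScalingType.SCALE_ASPECT_BALANCED,
      /* videoAspectRatio= */ 16f / 9f,
      /* maxDisplayWidth= */ 1080, /* maxDisplayHeight= */ 1080);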
@ -209,7 +209,7 @@ public class RendererCommon {
}
/**
* Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
* Move `matrix` transformation origin to (0.5, 0.5). This is the origin for texture coordinates
* that are in the range 0 to 1.
*/
private static void adjustOrigin(float[] matrix) {

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.lang.Double;
import java.lang.String;
import java.util.List;
@ -79,6 +79,9 @@ public class RtpParameters {
// SSRC to be used by this encoding.
// Can't be changed between getParameters/setParameters.
public Long ssrc;
// Set to true to allow dynamic frame length changes for audio:
// https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime
public boolean adaptiveAudioPacketTime;
// This constructor is useful for creating simulcast layers.
public Encoding(String rid, boolean active, Double scaleResolutionDownBy) {
@ -90,7 +93,8 @@ public class RtpParameters {
@CalledByNative("Encoding")
Encoding(String rid, boolean active, double bitratePriority, @Priority int networkPriority,
Integer maxBitrateBps, Integer minBitrateBps, Integer maxFramerate,
Integer numTemporalLayers, Double scaleResolutionDownBy, Long ssrc) {
Integer numTemporalLayers, Double scaleResolutionDownBy, Long ssrc,
boolean adaptiveAudioPacketTime) {
this.rid = rid;
this.active = active;
this.bitratePriority = bitratePriority;
@ -101,6 +105,7 @@ public class RtpParameters {
this.numTemporalLayers = numTemporalLayers;
this.scaleResolutionDownBy = scaleResolutionDownBy;
this.ssrc = ssrc;
this.adaptiveAudioPacketTime = adaptiveAudioPacketTime;
}
@Nullable
@ -159,6 +164,11 @@ public class RtpParameters {
Long getSsrc() {
return ssrc;
}
@CalledByNative("Encoding")
boolean getAdaptivePTime() {
return adaptiveAudioPacketTime;
}
}
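A sketch of building simulcast layers with the rid-based constructor above; the rid
values and scale factors are illustrative:
  List<RtpParameters.Encoding> encodings = Arrays.asList(
      new RtpParameters.Encoding("q", /* active= */ true, /* scaleResolutionDownBy= */ 4.0),
      new RtpParameters.Encoding("h", true, 2.0),
      new RtpParameters.Encoding("f", true, 1.0));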
public static class Codec {

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import org.webrtc.MediaStreamTrack;
/** Java wrapper for a C++ RtpReceiverInterface. */
@ -49,6 +49,12 @@ public class RtpReceiver {
return nativeGetId(nativeRtpReceiver);
}
/** Returns a pointer to webrtc::RtpReceiverInterface. */
long getNativeRtpReceiver() {
checkRtpReceiverExists();
return nativeRtpReceiver;
}
@CalledByNative
public void dispose() {
checkRtpReceiverExists();

View File

@ -10,8 +10,9 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.List;
import org.webrtc.MediaStreamTrack;
/** Java wrapper for a C++ RtpSenderInterface. */
public class RtpSender {
@ -27,8 +28,12 @@ public class RtpSender {
long nativeTrack = nativeGetTrack(nativeRtpSender);
cachedTrack = MediaStreamTrack.createMediaStreamTrack(nativeTrack);
long nativeDtmfSender = nativeGetDtmfSender(nativeRtpSender);
dtmfSender = (nativeDtmfSender != 0) ? new DtmfSender(nativeDtmfSender) : null;
if (nativeGetMediaType(nativeRtpSender).equalsIgnoreCase(MediaStreamTrack.AUDIO_TRACK_KIND)) {
long nativeDtmfSender = nativeGetDtmfSender(nativeRtpSender);
dtmfSender = (nativeDtmfSender != 0) ? new DtmfSender(nativeDtmfSender) : null;
} else {
dtmfSender = null;
}
}
/**
@ -39,7 +44,7 @@ public class RtpSender {
*
* @param takeOwnership If true, the RtpSender takes ownership of the track
* from the caller, and will auto-dispose of it when no
* longer needed. |takeOwnership| should only be used if
* longer needed. `takeOwnership` should only be used if
* the caller owns the track; it is not appropriate when
* the track is owned by, for example, another RtpSender
* or a MediaStream.
@ -143,4 +148,6 @@ public class RtpSender {
private static native String nativeGetId(long rtpSender);
private static native void nativeSetFrameEncryptor(long rtpSender, long nativeFrameEncryptor);
private static native String nativeGetMediaType(long rtpSender);
};

View File

@ -38,7 +38,8 @@ public class RtpTransceiver {
SEND_RECV(0),
SEND_ONLY(1),
RECV_ONLY(2),
INACTIVE(3);
INACTIVE(3),
STOPPED(4);
private final int nativeIndex;
@ -200,19 +201,40 @@ public class RtpTransceiver {
* sendrecv, sendonly, recvonly, or inactive.
* https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction
*/
public void setDirection(RtpTransceiverDirection rtpTransceiverDirection) {
public boolean setDirection(RtpTransceiverDirection rtpTransceiverDirection) {
checkRtpTransceiverExists();
nativeSetDirection(nativeRtpTransceiver, rtpTransceiverDirection);
return nativeSetDirection(nativeRtpTransceiver, rtpTransceiverDirection);
}
/**
* The Stop method irreversibly stops the RtpTransceiver. The sender of this
* transceiver will no longer send, the receiver will no longer receive.
* https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop
* For the time being, the stop() method calls stopInternal(). After a
* migration period, stop() will be equivalent to stopStandard().
*/
public void stop() {
checkRtpTransceiverExists();
nativeStop(nativeRtpTransceiver);
nativeStopInternal(nativeRtpTransceiver);
}
/**
* The StopInternal method stops the RtpTransceiver, like Stop, but goes
* immediately to Stopped state.
*/
public void stopInternal() {
checkRtpTransceiverExists();
nativeStopInternal(nativeRtpTransceiver);
}
/**
* The StopStandard method irreversibly stops the RtpTransceiver. The sender
* of this transceiver will no longer send, the receiver will no longer
* receive.
*
* <p>The transceiver will enter Stopping state and signal NegotiationNeeded.
* https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop
*/
public void stopStandard() {
checkRtpTransceiverExists();
nativeStopStandard(nativeRtpTransceiver);
}
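A usage sketch, assuming an RtpTransceiver `transceiver`:
  // Today stop() maps to stopInternal(); to follow the spec'd two-phase shutdown
  // (Stopping, then Stopped, signaling negotiation needed), call stopStandard().
  transceiver.stopStandard();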
@CalledByNative
@ -237,7 +259,8 @@ public class RtpTransceiver {
private static native boolean nativeStopped(long rtpTransceiver);
private static native RtpTransceiverDirection nativeDirection(long rtpTransceiver);
private static native RtpTransceiverDirection nativeCurrentDirection(long rtpTransceiver);
private static native void nativeStop(long rtpTransceiver);
private static native void nativeSetDirection(
private static native void nativeStopInternal(long rtpTransceiver);
private static native void nativeStopStandard(long rtpTransceiver);
private static native boolean nativeSetDirection(
long rtpTransceiver, RtpTransceiverDirection rtpTransceiverDirection);
}

View File

@ -10,7 +10,6 @@
package org.webrtc;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
@ -18,8 +17,8 @@ import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.support.annotation.Nullable;
import android.view.Surface;
import androidx.annotation.Nullable;
/**
* An implementation of VideoCapturer to capture the screen content as a video stream.
@ -31,10 +30,7 @@ import android.view.Surface;
* place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
* the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
* frames. At any time, at most one frame is being processed.
*
* @note This class is only supported on Android Lollipop and above.
*/
@TargetApi(21)
public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
private static final int DISPLAY_FLAGS =
DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
@ -75,6 +71,11 @@ public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
}
}
@Nullable
public MediaProjection getMediaProjection() {
return mediaProjection;
}
@Override
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")

View File

@ -22,7 +22,8 @@ public class SessionDescription {
public static enum Type {
OFFER,
PRANSWER,
ANSWER;
ANSWER,
ROLLBACK;
public String canonicalForm() {
return name().toLowerCase(Locale.US);

View File

@ -10,45 +10,44 @@
package org.webrtc;
import android.support.annotation.Nullable;
import java.util.ArrayList;
import java.util.HashMap;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.List;
public class SoftwareVideoDecoderFactory implements VideoDecoderFactory {
@Deprecated
@Nullable
@Override
public VideoDecoder createDecoder(String codecType) {
return createDecoder(new VideoCodecInfo(codecType, new HashMap<>()));
private static final String TAG = "SoftwareVideoDecoderFactory";
private final long nativeFactory;
public SoftwareVideoDecoderFactory() {
this.nativeFactory = nativeCreateFactory();
}
@Nullable
@Override
public VideoDecoder createDecoder(VideoCodecInfo codecType) {
if (codecType.getName().equalsIgnoreCase("VP8")) {
return new LibvpxVp8Decoder();
}
if (codecType.getName().equalsIgnoreCase("VP9") && LibvpxVp9Decoder.nativeIsSupported()) {
return new LibvpxVp9Decoder();
public VideoDecoder createDecoder(VideoCodecInfo info) {
long nativeDecoder = nativeCreateDecoder(nativeFactory, info);
if (nativeDecoder == 0) {
Logging.w(TAG, "Trying to create decoder for unsupported format. " + info);
return null;
}
return null;
return new WrappedNativeVideoDecoder() {
@Override
public long createNativeVideoDecoder() {
return nativeDecoder;
}
};
}
@Override
public VideoCodecInfo[] getSupportedCodecs() {
return supportedCodecs();
return nativeGetSupportedCodecs(nativeFactory).toArray(new VideoCodecInfo[0]);
}
static VideoCodecInfo[] supportedCodecs() {
List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
private static native long nativeCreateFactory();
codecs.add(new VideoCodecInfo("VP8", new HashMap<>()));
if (LibvpxVp9Decoder.nativeIsSupported()) {
codecs.add(new VideoCodecInfo("VP9", new HashMap<>()));
}
private static native long nativeCreateDecoder(long factory, VideoCodecInfo videoCodecInfo);
return codecs.toArray(new VideoCodecInfo[codecs.size()]);
}
private static native List<VideoCodecInfo> nativeGetSupportedCodecs(long factory);
}
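A usage sketch; the VP8 VideoCodecInfo below is illustrative, and createDecoder()
returns null for formats the native factory does not support:
  VideoDecoderFactory factory = new SoftwareVideoDecoderFactory();
  for (VideoCodecInfo info : factory.getSupportedCodecs()) {
    Logging.d("DecoderDemo", "Supported: " + info);
  }
  VideoDecoder decoder =
      factory.createDecoder(new VideoCodecInfo("VP8", new java.util.HashMap<>()));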

View File

@ -10,38 +10,49 @@
package org.webrtc;
import android.support.annotation.Nullable;
import java.util.ArrayList;
import java.util.HashMap;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.List;
public class SoftwareVideoEncoderFactory implements VideoEncoderFactory {
private static final String TAG = "SoftwareVideoEncoderFactory";
private final long nativeFactory;
public SoftwareVideoEncoderFactory() {
this.nativeFactory = nativeCreateFactory();
}
@Nullable
@Override
public VideoEncoder createEncoder(VideoCodecInfo info) {
if (info.name.equalsIgnoreCase("VP8")) {
return new LibvpxVp8Encoder();
}
if (info.name.equalsIgnoreCase("VP9") && LibvpxVp9Encoder.nativeIsSupported()) {
return new LibvpxVp9Encoder();
long nativeEncoder = nativeCreateEncoder(nativeFactory, info);
if (nativeEncoder == 0) {
Logging.w(TAG, "Trying to create encoder for unsupported format. " + info);
return null;
}
return null;
return new WrappedNativeVideoEncoder() {
@Override
public long createNativeVideoEncoder() {
return nativeEncoder;
}
@Override
public boolean isHardwareEncoder() {
return false;
}
};
}
@Override
public VideoCodecInfo[] getSupportedCodecs() {
return supportedCodecs();
return nativeGetSupportedCodecs(nativeFactory).toArray(new VideoCodecInfo[0]);
}
static VideoCodecInfo[] supportedCodecs() {
List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
private static native long nativeCreateFactory();
codecs.add(new VideoCodecInfo("VP8", new HashMap<>()));
if (LibvpxVp9Encoder.nativeIsSupported()) {
codecs.add(new VideoCodecInfo("VP9", new HashMap<>()));
}
private static native long nativeCreateEncoder(long factory, VideoCodecInfo videoCodecInfo);
return codecs.toArray(new VideoCodecInfo[codecs.size()]);
}
private static native List<VideoCodecInfo> nativeGetSupportedCodecs(long factory);
}

View File

@ -42,9 +42,9 @@ public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Cal
}
/**
* Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
* Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
* for drawing frames on the EGLSurface. This class is responsible for calling release() on
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
* `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
* init()/release() cycle.
*/
public void init(final EglBase.Context sharedContext,
@ -125,7 +125,7 @@ public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Cal
logD("surfaceChanged: format: " + format + " size: " + width + "x" + height);
}
// Update frame dimensions and report any changes to |rendererEvents|.
// Update frame dimensions and report any changes to `rendererEvents`.
private void updateFrameDimensionsAndReportEvents(VideoFrame frame) {
synchronized (layoutLock) {
if (isRenderingPaused) {

View File

@ -17,7 +17,7 @@ import android.opengl.GLES20;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.concurrent.Callable;
import org.webrtc.EglBase.Context;
import org.webrtc.TextureBufferImpl.RefCountMonitor;
@ -48,7 +48,7 @@ public class SurfaceTextureHelper {
private static final String TAG = "SurfaceTextureHelper";
/**
* Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
* Construct a new SurfaceTextureHelper sharing OpenGL resources with `sharedContext`. A dedicated
* thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to
* initialize a pixel buffer surface and make it current. If alignTimestamps is true, the frame
* timestamps will be aligned to rtc::TimeNanos(). If frame timestamps are aligned to
@ -66,7 +66,7 @@ public class SurfaceTextureHelper {
// The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
// Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
// is constructed on the |handler| thread.
// is constructed on the `handler` thread.
return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
@Nullable
@Override
@ -147,7 +147,7 @@ public class SurfaceTextureHelper {
@Nullable private final TimestampAligner timestampAligner;
private final FrameRefMonitor frameRefMonitor;
// These variables are only accessed from the |handler| thread.
// These variables are only accessed from the `handler` thread.
@Nullable private VideoSink listener;
// The possible states of this class.
private boolean hasPendingTexture;
@ -156,7 +156,7 @@ public class SurfaceTextureHelper {
private int frameRotation;
private int textureWidth;
private int textureHeight;
// |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
// `pendingListener` is set in setListener() and the runnable is posted to the handler thread.
// setListener() is not allowed to be called again before stopListening(), so this is thread safe.
@Nullable private VideoSink pendingListener;
final Runnable setListenerRunnable = new Runnable() {
@ -198,28 +198,18 @@ public class SurfaceTextureHelper {
oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
surfaceTexture = new SurfaceTexture(oesTextureId);
setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> {
surfaceTexture.setOnFrameAvailableListener(st -> {
if (hasPendingTexture) {
Logging.d(TAG, "A frame is already pending, dropping frame.");
}
hasPendingTexture = true;
tryDeliverTextureFrame();
}, handler);
}
@TargetApi(21)
private static void setOnFrameAvailableListener(SurfaceTexture surfaceTexture,
SurfaceTexture.OnFrameAvailableListener listener, Handler handler) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
surfaceTexture.setOnFrameAvailableListener(listener, handler);
} else {
// The documentation states that the listener will be called on an arbitrary thread, but in
// practice, it is always the thread on which the SurfaceTexture was constructed. There are
// assertions in place in case this ever changes. For API >= 21, we use the new API to
// explicitly specify the handler.
surfaceTexture.setOnFrameAvailableListener(listener);
}
}
/**
* Start to stream textures to the given |listener|. If you need to change listener, you need to
* Start to stream textures to the given `listener`. If you need to change listener, you need to
* call stopListening() first.
*/
public void startListening(final VideoSink listener) {
@ -327,7 +317,7 @@ public class SurfaceTextureHelper {
}
/**
* Posts to the correct thread to convert |textureBuffer| to I420.
* Posts to the correct thread to convert `textureBuffer` to I420.
*
* @deprecated Use toI420() instead.
*/

View File

@ -64,7 +64,7 @@ public class SurfaceViewRenderer extends SurfaceView
}
/**
* Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
* Initialize this class, sharing resources with `sharedContext`. It is allowed to call init() to
* reinitialize the renderer after a previous init()/release() cycle.
*/
public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
@ -72,9 +72,9 @@ public class SurfaceViewRenderer extends SurfaceView
}
/**
* Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
* Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
* for drawing frames on the EGLSurface. This class is responsible for calling release() on
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
* `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
* init()/release() cycle.
*/
public void init(final EglBase.Context sharedContext,

View File

@ -12,7 +12,7 @@ package org.webrtc;
import android.graphics.Matrix;
import android.os.Handler;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
/**
* Android texture buffer that glues together the necessary information together with a generic
@ -169,6 +169,7 @@ public class TextureBufferImpl implements VideoFrame.TextureBuffer {
* existing buffer is unchanged. The given transform matrix is applied first when texture
* coordinates are still in the unmodified [0, 1] range.
*/
@Override
public TextureBufferImpl applyTransformMatrix(
Matrix transformMatrix, int newWidth, int newHeight) {
return applyTransformMatrix(transformMatrix, /* unscaledWidth= */ newWidth,

View File

@ -31,7 +31,7 @@ public class TimestampAligner {
/**
* Translates camera timestamps to the same timescale as is used by rtc::TimeNanos().
* |cameraTimeNs| is assumed to be accurate, but with an unknown epoch and clock drift. Returns
* `cameraTimeNs` is assumed to be accurate, but with an unknown epoch and clock drift. Returns
* the translated timestamp.
*/
public long translateTimestamp(long cameraTimeNs) {

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.Locale;
import java.util.Map;
@ -69,6 +69,11 @@ public class VideoCodecInfo {
return Arrays.hashCode(values);
}
@Override
public String toString() {
return "VideoCodec{" + name + " " + params + "}";
}
@CalledByNative
String getName() {
return name;

View File

@ -15,6 +15,7 @@ package org.webrtc;
* video_error_codes.h.
*/
public enum VideoCodecStatus {
TARGET_BITRATE_OVERSHOOT(5),
REQUEST_SLI(2),
NO_OUTPUT(1),
OK(0),
@ -26,8 +27,7 @@ public enum VideoCodecStatus {
TIMEOUT(-6),
UNINITIALIZED(-7),
ERR_REQUEST_SLI(-12),
FALLBACK_SOFTWARE(-13),
TARGET_BITRATE_OVERSHOOT(-14);
FALLBACK_SOFTWARE(-13);
private final int number;

View File

@ -86,11 +86,6 @@ public interface VideoDecoder {
* Request the decoder to decode a frame.
*/
@CalledByNative VideoCodecStatus decode(EncodedImage frame, DecodeInfo info);
/**
* The decoder should return true if it prefers late decoding. That is, it can not decode
* infinite number of frames before the decoded frame is consumed.
*/
@CalledByNative boolean getPrefersLateDecoding();
/**
* Should return a descriptive name for the implementation. Gets called once and cached. May be
* called from arbitrary thread.

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
/** Factory for creating VideoDecoders. */
public interface VideoDecoderFactory {
@ -18,18 +18,7 @@ public interface VideoDecoderFactory {
* Creates a VideoDecoder for the given codec. Supports the same codecs supported by
* VideoEncoderFactory.
*/
@Deprecated
@Nullable
default VideoDecoder createDecoder(String codecType) {
throw new UnsupportedOperationException("Deprecated and not implemented.");
}
/** Creates a decoder for the given video codec. */
@Nullable
@CalledByNative
default VideoDecoder createDecoder(VideoCodecInfo info) {
return createDecoder(info.getName());
}
@Nullable @CalledByNative VideoDecoder createDecoder(VideoCodecInfo info);
/**
* Enumerates the list of supported video codecs.

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import org.webrtc.EncodedImage;
/**
@ -86,6 +86,8 @@ public interface VideoEncoder {
public class CodecSpecificInfoH264 extends CodecSpecificInfo {}
public class CodecSpecificInfoAV1 extends CodecSpecificInfo {}
/**
* Represents bitrate allocated for an encoder to produce frames. Bitrate can be divided between
* spatial and temporal layers.
@ -234,9 +236,64 @@ public interface VideoEncoder {
}
}
/** Rate control parameters. */
public class RateControlParameters {
/**
* Adjusted target bitrate, per spatial/temporal layer. May be lower or higher than the target
* depending on encoder behaviour.
*/
public final BitrateAllocation bitrate;
/**
* Target framerate, in fps. A value <= 0.0 is invalid and should be interpreted as framerate
* target not available. In this case the encoder should fall back to the max framerate
* specified in `codec_settings` of the last InitEncode() call.
*/
public final double framerateFps;
@CalledByNative("RateControlParameters")
public RateControlParameters(BitrateAllocation bitrate, double framerateFps) {
this.bitrate = bitrate;
this.framerateFps = framerateFps;
}
}
/**
* Metadata about the Encoder.
*/
public class EncoderInfo {
/**
* The width and height of the incoming video frames should be divisible by
* |requested_resolution_alignment|
*/
public final int requestedResolutionAlignment;
/**
* Same as above but if true, each simulcast layer should also be divisible by
* |requested_resolution_alignment|.
*/
public final boolean applyAlignmentToAllSimulcastLayers;
public EncoderInfo(
int requestedResolutionAlignment, boolean applyAlignmentToAllSimulcastLayers) {
this.requestedResolutionAlignment = requestedResolutionAlignment;
this.applyAlignmentToAllSimulcastLayers = applyAlignmentToAllSimulcastLayers;
}
@CalledByNative("EncoderInfo")
public int getRequestedResolutionAlignment() {
return requestedResolutionAlignment;
}
@CalledByNative("EncoderInfo")
public boolean getApplyAlignmentToAllSimulcastLayers() {
return applyAlignmentToAllSimulcastLayers;
}
}
public interface Callback {
/**
* Old encoders assume that the byte buffer held by |frame| is not accessed after the call to
* Old encoders assume that the byte buffer held by `frame` is not accessed after the call to
* this method returns. If the pipeline downstream needs to hold on to the buffer, it then has
* to make its own copy. We want to move to a model where no copying is needed, and instead use
* retain()/release() to signal to the encoder when it is safe to reuse the buffer.
@ -294,7 +351,14 @@ public interface VideoEncoder {
@CalledByNative VideoCodecStatus encode(VideoFrame frame, EncodeInfo info);
/** Sets the bitrate allocation and the target framerate for the encoder. */
@CalledByNative VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate);
VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate);
/** Sets the bitrate allocation and the target framerate for the encoder. */
default @CalledByNative VideoCodecStatus setRates(RateControlParameters rcParameters) {
// Round frame rate up to avoid overshoots.
int framerateFps = (int) Math.ceil(rcParameters.framerateFps);
return setRateAllocation(rcParameters.bitrate, framerateFps);
}
/** Any encoder that wants to use WebRTC provided quality scaler must implement this method. */
@CalledByNative ScalingSettings getScalingSettings();
@ -312,4 +376,10 @@ public interface VideoEncoder {
* called from arbitrary thread.
*/
@CalledByNative String getImplementationName();
@CalledByNative
default EncoderInfo getEncoderInfo() {
return new EncoderInfo(
/* requestedResolutionAlignment= */ 1, /* applyAlignmentToAllSimulcastLayers= */ false);
}
}

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
/** Factory for creating VideoEncoders. */
public interface VideoEncoderFactory {
@ -24,6 +24,16 @@ public interface VideoEncoderFactory {
*/
@Nullable @CalledByNative("VideoEncoderSelector") VideoCodecInfo onAvailableBitrate(int kbps);
/**
* Called every time the encoder input resolution changes. Returns null if the encoder selector
* prefers to keep the current encoder, or a VideoCodecInfo if a new encoder is preferred.
*/
@Nullable
@CalledByNative("VideoEncoderSelector")
default VideoCodecInfo onResolutionChange(int width, int height) {
return null;
}
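A selector might use this hook to switch codecs by resolution; a hypothetical sketch (the 360p threshold and VP8 choice are illustrative, and java.util.HashMap is assumed imported):
@Nullable
@Override
public VideoCodecInfo onResolutionChange(int width, int height) {
  // Sketch: prefer VP8 below 640x360; return null to keep the current encoder.
  if (width * height < 640 * 360) {
    return new VideoCodecInfo("VP8", new HashMap<>());
  }
  return null;
}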
/**
* Called when the currently used encoder signals itself as broken. Returns null if the encoder
* selector prefers to keep the current encoder, or a VideoCodecInfo if a new encoder is

View File

@ -13,6 +13,7 @@ package org.webrtc;
import android.graphics.Matrix;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
/**
@ -34,6 +35,15 @@ public class VideoFrame implements RefCounted {
* and the buffer needs to be returned to the VideoSource as soon as all references are gone.
*/
public interface Buffer extends RefCounted {
/**
* Representation of the underlying buffer. Currently, only NATIVE and I420 are supported.
*/
@CalledByNative("Buffer")
@VideoFrameBufferType
default int getBufferType() {
return VideoFrameBufferType.NATIVE;
}
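Consumers can branch on the reported type instead of using instanceof; a minimal sketch (assumes a buffer reporting I420 is in fact an I420Buffer, per the contract below):
if (buffer.getBufferType() == VideoFrameBufferType.I420) {
  // Memory-backed planes can be read directly without conversion.
  VideoFrame.I420Buffer i420 = (VideoFrame.I420Buffer) buffer;
  // ... read the I420 planes ...
} else {
  // NATIVE buffers need an explicit (possibly expensive) conversion.
  VideoFrame.I420Buffer converted = buffer.toI420();
  // ... use converted, then converted.release() ...
}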
/**
* Resolution of the buffer in pixels.
*/
@ -44,15 +54,18 @@ public class VideoFrame implements RefCounted {
* Returns a memory-backed frame in I420 format. If the pixel data is in another format, a
* conversion will take place. All implementations must provide a fallback to I420 for
* compatibility with e.g. the internal WebRTC software encoders.
*
* <p> Conversion may fail, for example if reading the pixel data from a texture fails. If the
* conversion fails, null is returned.
*/
@CalledByNative("Buffer") I420Buffer toI420();
@Nullable @CalledByNative("Buffer") I420Buffer toI420();
@Override @CalledByNative("Buffer") void retain();
@Override @CalledByNative("Buffer") void release();
/**
* Crops a region defined by |cropx|, |cropY|, |cropWidth| and |cropHeight|. Scales it to size
* |scaleWidth| x |scaleHeight|.
* Crops a region defined by `cropX`, `cropY`, `cropWidth` and `cropHeight`. Scales it to size
* `scaleWidth` x `scaleHeight`.
*/
@CalledByNative("Buffer")
Buffer cropAndScale(
@ -63,6 +76,11 @@ public class VideoFrame implements RefCounted {
* Interface for I420 buffers.
*/
public interface I420Buffer extends Buffer {
@Override
default int getBufferType() {
return VideoFrameBufferType.I420;
}
/**
* Returns a direct ByteBuffer containing Y-plane data. The buffer capacity is at least
* getStrideY() * getHeight() bytes. The position of the returned buffer is ignored and must
@ -118,6 +136,16 @@ public class VideoFrame implements RefCounted {
* the coordinate that should be used to sample that location from the buffer.
*/
Matrix getTransformMatrix();
/**
* Create a new TextureBufferImpl with an applied transform matrix and a new size. The existing
* buffer is unchanged. The given transform matrix is applied first when texture coordinates are
* still in the unmodified [0, 1] range.
*/
default TextureBuffer applyTransformMatrix(
Matrix transformMatrix, int newWidth, int newHeight) {
throw new UnsupportedOperationException("Not implemented");
}
}
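As a usage sketch, a vertical flip combined with downscaling might look like this (the target size is illustrative; the returned buffer must be released like any other):
Matrix flip = new Matrix();
// Texture coordinates are in [0, 1]; map y to 1 - y to flip vertically.
flip.setScale(1f, -1f);
flip.postTranslate(0f, 1f);
VideoFrame.TextureBuffer flipped = textureBuffer.applyTransformMatrix(
    flip, /* newWidth= */ 640, /* newHeight= */ 360);
// ... use flipped ...
flipped.release();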
private final Buffer buffer;

View File

@ -13,7 +13,7 @@ package org.webrtc;
import android.graphics.Matrix;
import android.graphics.Point;
import android.opengl.GLES20;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
/**
@ -61,7 +61,7 @@ public class VideoFrameDrawer {
@Nullable private int[] yuvTextures;
/**
* Upload |planes| into OpenGL textures, taking stride into consideration.
* Upload `planes` into OpenGL textures, taking stride into consideration.
*
* @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
*/
@ -145,8 +145,8 @@ public class VideoFrameDrawer {
private int renderWidth;
private int renderHeight;
// Calculate the frame size after |renderMatrix| is applied. Stores the output in member variables
// |renderWidth| and |renderHeight| to avoid allocations since this function is called for every
// Calculate the frame size after `renderMatrix` is applied. Stores the output in member variables
// `renderWidth` and `renderHeight` to avoid allocations since this function is called for every
// frame.
private void calculateTransformedRenderSize(
int frameWidth, int frameHeight, @Nullable Matrix renderMatrix) {
@ -155,7 +155,7 @@ public class VideoFrameDrawer {
renderHeight = frameHeight;
return;
}
// Transform the texture coordinates (in the range [0, 1]) according to |renderMatrix|.
// Transform the texture coordinates (in the range [0, 1]) according to `renderMatrix`.
renderMatrix.mapPoints(dstPoints, srcPoints);
// Multiply with the width and height to get the positions in terms of pixels.

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
/**
* Lightweight abstraction for an object that can receive video frames, process them, and pass them

View File

@ -10,7 +10,7 @@
package org.webrtc;
import android.support.annotation.Nullable;
import androidx.annotation.Nullable;
/**
* Java wrapper of native AndroidVideoTrackSource.

View File

@ -31,11 +31,6 @@ public abstract class WrappedNativeVideoDecoder implements VideoDecoder {
throw new UnsupportedOperationException("Not implemented.");
}
@Override
public final boolean getPrefersLateDecoding() {
throw new UnsupportedOperationException("Not implemented.");
}
@Override
public final String getImplementationName() {
throw new UnsupportedOperationException("Not implemented.");

View File

@ -12,6 +12,8 @@ package org.webrtc;
import android.graphics.Matrix;
import android.opengl.GLES20;
import android.opengl.GLException;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import org.webrtc.VideoFrame.I420Buffer;
import org.webrtc.VideoFrame.TextureBuffer;
@ -20,7 +22,9 @@ import org.webrtc.VideoFrame.TextureBuffer;
* Class for converting OES textures to a YUV ByteBuffer. It can be constructed on any thread, but
* should only be operated from a single thread with an active EGL context.
*/
public class YuvConverter {
public final class YuvConverter {
private static final String TAG = "YuvConverter";
private static final String FRAGMENT_SHADER =
// Difference in texture coordinate corresponding to one
// sub-pixel in the x direction.
@ -32,9 +36,7 @@ public class YuvConverter {
// Since the alpha read from the texture is always 1, this could
// be written as a mat4 x vec4 multiply. However, that seems to
// give a worse framerate, possibly because the additional
// multiplies by 1.0 consume resources. TODO(nisse): Could also
// try to do it as a vec3 x mat3x4, followed by an add in of a
// constant vector.
// multiplies by 1.0 consume resources.
+ " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ " sample(tc - 1.5 * xUnit).rgb);\n"
+ " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
@ -122,9 +124,17 @@ public class YuvConverter {
}
/** Converts the texture buffer to I420. */
@Nullable
public I420Buffer convert(TextureBuffer inputTextureBuffer) {
threadChecker.checkIsOnValidThread();
try {
return convertInternal(inputTextureBuffer);
} catch (GLException e) {
Logging.w(TAG, "Failed to convert TextureBuffer", e);
}
return null;
}
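Callers should treat a null result as a dropped frame; a minimal sketch, assuming it runs on the single thread that owns the converter's EGL context:
YuvConverter converter = new YuvConverter();
VideoFrame.I420Buffer i420 = converter.convert(textureBuffer);
if (i420 != null) {
  // ... encode or inspect the I420 planes ...
  i420.release();
}
// A null result means the GL conversion failed; skip this frame.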
private I420Buffer convertInternal(TextureBuffer inputTextureBuffer) {
TextureBuffer preparedBuffer = (TextureBuffer) videoFrameDrawer.prepareBufferForViewportSize(
inputTextureBuffer, inputTextureBuffer.getWidth(), inputTextureBuffer.getHeight());
@ -141,7 +151,7 @@ public class YuvConverter {
// +----+----+
//
// In memory, we use the same stride for all of Y, U and V. The
// U data starts at offset |height| * |stride| from the Y data,
// U data starts at offset `height` * `stride` from the Y data,
// and the V data starts at an offset `stride/2` from the U
// data, with rows of U and V data alternating.
//
@ -149,12 +159,12 @@ public class YuvConverter {
// a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
// EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
// unsupported by devices. So do the following hack: Allocate an
// RGBA buffer, of width |stride|/4. To render each of these
// RGBA buffer, of width `stride`/4. To render each of these
// large pixels, sample the texture at 4 different x coordinates
// and store the results in the four components.
//
// Since the V data needs to start on a boundary of such a
// larger pixel, it is not sufficient that |stride| is even, it
// larger pixel, it is not sufficient that `stride` is even, it
// has to be a multiple of 8 pixels.
final int frameWidth = preparedBuffer.getWidth();
final int frameHeight = preparedBuffer.getHeight();

View File

@ -14,55 +14,93 @@ import java.nio.ByteBuffer;
/** Wraps libyuv methods to Java. All passed byte buffers must be direct byte buffers. */
public class YuvHelper {
/** Helper method for copying I420 to tightly packed destination buffer. */
/**
* Copies an I420 buffer into a contiguously allocated destination buffer.
* <p> On Android, MediaCodec can request a buffer with a specific layout, given by a stride and
* slice-height (or plane height) per plane; this function produces such a layout.
* <p> For more information, see
* https://cs.android.com/android/platform/superproject/+/64fea7e5726daebc40f46890100837c01091100d:frameworks/base/media/java/android/media/MediaFormat.java;l=568
* @param dstStrideY the stride of the output buffer's Y plane.
* @param dstSliceHeightY the slice-height of the output buffer's Y plane.
* @param dstStrideU the stride of the output buffer's U (and V) plane.
* @param dstSliceHeightU the slice-height of the output buffer's U (and V) plane.
*/
public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int width, int height) {
final int chromaHeight = (height + 1) / 2;
final int chromaWidth = (width + 1) / 2;
ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight, int dstStrideY,
int dstSliceHeightY, int dstStrideU, int dstSliceHeightU) {
final int chromaWidth = (dstWidth + 1) / 2;
final int chromaHeight = (dstHeight + 1) / 2;
final int minSize = width * height + chromaWidth * chromaHeight * 2;
if (dst.capacity() < minSize) {
final int dstStartY = 0;
final int dstEndY = dstStartY + dstStrideY * dstHeight;
final int dstStartU = dstStartY + dstStrideY * dstSliceHeightY;
final int dstEndU = dstStartU + dstStrideU * chromaHeight;
final int dstStartV = dstStartU + dstStrideU * dstSliceHeightU;
// The last line doesn't need any padding, so use chromaWidth
// to calculate the exact end position.
final int dstEndV = dstStartV + dstStrideU * (chromaHeight - 1) + chromaWidth;
if (dst.capacity() < dstEndV) {
throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+ minSize + " was " + dst.capacity());
+ dstEndV + " was " + dst.capacity());
}
final int startY = 0;
final int startU = height * width;
final int startV = startU + chromaHeight * chromaWidth;
dst.position(startY);
dst.limit(dstEndY);
dst.position(dstStartY);
final ByteBuffer dstY = dst.slice();
dst.position(startU);
dst.limit(dstEndU);
dst.position(dstStartU);
final ByteBuffer dstU = dst.slice();
dst.position(startV);
dst.limit(dstEndV);
dst.position(dstStartV);
final ByteBuffer dstV = dst.slice();
nativeI420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, width, dstU,
chromaWidth, dstV, chromaWidth, width, height);
I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
dstStrideU, dstV, dstStrideU, dstWidth, dstHeight);
}
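The required destination capacity follows from the stride and slice-height layout above; a minimal sketch of sizing and copying for a MediaCodec-style buffer (all concrete values are illustrative, and the source buffers and strides are assumed in scope):
// Sketch: copy a 320x240 I420 frame into a buffer with a 384-byte Y stride,
// 256-row Y slice height, 192-byte chroma stride and 128-row chroma slice height.
final int dstWidth = 320, dstHeight = 240;
final int dstStrideY = 384, dstSliceHeightY = 256;
final int dstStrideU = 192, dstSliceHeightU = 128;
final int chromaWidth = (dstWidth + 1) / 2;
final int chromaHeight = (dstHeight + 1) / 2;
// Mirrors the dstEndV computation: the last V row needs no padding.
final int capacity = dstStrideY * dstSliceHeightY + dstStrideU * dstSliceHeightU
    + dstStrideU * (chromaHeight - 1) + chromaWidth;
ByteBuffer dst = ByteBuffer.allocateDirect(capacity);
YuvHelper.I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth,
    dstHeight, dstStrideY, dstSliceHeightY, dstStrideU, dstSliceHeightU);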
/** Helper method for copying I420 to tightly packed destination buffer. */
public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight) {
I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth, dstHeight,
dstWidth, dstHeight, (dstWidth + 1) / 2, (dstHeight + 1) / 2);
}
/**
* Converts an I420 buffer into a contiguously allocated NV12 destination buffer.
* @param dstStrideY the stride of the output buffer's Y plane.
* @param dstSliceHeightY the slice-height of the output buffer's Y plane.
*/
public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight, int dstStrideY,
int dstSliceHeightY) {
final int chromaHeight = (dstHeight + 1) / 2;
final int chromaWidth = (dstWidth + 1) / 2;
final int dstStartY = 0;
final int dstEndY = dstStartY + dstStrideY * dstHeight;
final int dstStartUV = dstStartY + dstStrideY * dstSliceHeightY;
final int dstEndUV = dstStartUV + chromaWidth * chromaHeight * 2;
if (dst.capacity() < dstEndUV) {
throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+ dstEndUV + " was " + dst.capacity());
}
dst.limit(dstEndY);
dst.position(dstStartY);
final ByteBuffer dstY = dst.slice();
dst.limit(dstEndUV);
dst.position(dstStartUV);
final ByteBuffer dstUV = dst.slice();
I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstUV,
chromaWidth * 2, dstWidth, dstHeight);
}
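Sizing the NV12 variant is analogous: a Y plane of dstStrideY * dstSliceHeightY bytes followed by tightly packed interleaved UV; a minimal sketch (values illustrative, source buffers assumed in scope):
final int dstWidth = 320, dstHeight = 240;
final int dstStrideY = 384, dstSliceHeightY = 256;
final int chromaWidth = (dstWidth + 1) / 2;
final int chromaHeight = (dstHeight + 1) / 2;
// Y plane plus the interleaved UV plane, matching the dstEndUV computation.
final int capacity = dstStrideY * dstSliceHeightY + chromaWidth * chromaHeight * 2;
ByteBuffer dst = ByteBuffer.allocateDirect(capacity);
YuvHelper.I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth,
    dstHeight, dstStrideY, dstSliceHeightY);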
/** Helper method for copying I420 to tightly packed NV12 destination buffer. */
public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int width, int height) {
final int chromaWidth = (width + 1) / 2;
final int chromaHeight = (height + 1) / 2;
final int minSize = width * height + chromaWidth * chromaHeight * 2;
if (dst.capacity() < minSize) {
throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+ minSize + " was " + dst.capacity());
}
final int startY = 0;
final int startUV = height * width;
dst.position(startY);
final ByteBuffer dstY = dst.slice();
dst.position(startUV);
final ByteBuffer dstUV = dst.slice();
nativeI420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, width, dstUV,
chromaWidth * 2, width, height);
ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight) {
I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth, dstHeight,
dstWidth, dstHeight);
}
/** Helper method for rotating I420 to tightly packed destination buffer. */
@ -109,9 +147,18 @@ public class YuvHelper {
src, srcStride, dstY, dstStrideY, dstU, dstStrideU, dstV, dstStrideV, width, height);
}
/**
* Copies I420 to the I420 dst buffer.
* <p> Unlike `libyuv::I420Copy`, this function rejects a height <= 0, so flipping via a negative
* height is not supported.
*/
public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height) {
if (srcY == null || srcU == null || srcV == null || dstY == null || dstU == null || dstV == null
|| width <= 0 || height <= 0) {
throw new IllegalArgumentException("Invalid I420Copy input arguments");
}
nativeI420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
dstStrideU, dstV, dstStrideV, width, height);
}
@ -119,6 +166,10 @@ public class YuvHelper {
public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstUV,
int dstStrideUV, int width, int height) {
if (srcY == null || srcU == null || srcV == null || dstY == null || dstUV == null || width <= 0
|| height <= 0) {
throw new IllegalArgumentException("Invalid I420ToNV12 input arguments");
}
nativeI420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstUV,
dstStrideUV, width, height);
}

View File

@ -11,10 +11,12 @@
package org.webrtc.audio;
import android.content.Context;
import android.media.AudioAttributes;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.os.Build;
import android.support.annotation.RequiresApi;
import androidx.annotation.RequiresApi;
import java.util.concurrent.ScheduledExecutorService;
import org.webrtc.JniCommon;
import org.webrtc.Logging;
@ -31,6 +33,7 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
public static class Builder {
private final Context context;
private ScheduledExecutorService scheduler;
private final AudioManager audioManager;
private int inputSampleRate;
private int outputSampleRate;
@ -45,12 +48,22 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported();
private boolean useStereoInput;
private boolean useStereoOutput;
private AudioAttributes audioAttributes;
private boolean useLowLatency;
private boolean enableVolumeLogger;
private Builder(Context context) {
this.context = context;
this.audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
this.inputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
this.useLowLatency = false;
this.enableVolumeLogger = true;
}
public Builder setScheduler(ScheduledExecutorService scheduler) {
this.scheduler = scheduler;
return this;
}
/**
@ -186,11 +199,33 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
return this;
}
/**
* Controls whether low-latency mode should be used. Disabled by default.
*/
public Builder setUseLowLatency(boolean useLowLatency) {
this.useLowLatency = useLowLatency;
return this;
}
/**
* Set custom {@link AudioAttributes} to use.
*/
public Builder setAudioAttributes(AudioAttributes audioAttributes) {
this.audioAttributes = audioAttributes;
return this;
}
/** Enables or disables the volume logger on the audio output track. Enabled by default. */
public Builder setEnableVolumeLogger(boolean enableVolumeLogger) {
this.enableVolumeLogger = enableVolumeLogger;
return this;
}
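Putting the new builder options together, construction might look like this (a sketch; the chosen attribute values are illustrative):
JavaAudioDeviceModule adm =
    JavaAudioDeviceModule.builder(applicationContext)
        .setUseLowLatency(true)
        .setAudioAttributes(new AudioAttributes.Builder()
                                .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
                                .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
                                .build())
        .setEnableVolumeLogger(false)
        .createAudioDeviceModule();
// The caller owns the module and must call adm.release() when done.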
/**
* Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
* and is responsible for calling release().
*/
public AudioDeviceModule createAudioDeviceModule() {
public JavaAudioDeviceModule createAudioDeviceModule() {
Logging.d(TAG, "createAudioDeviceModule");
if (useHardwareNoiseSuppressor) {
Logging.d(TAG, "HW NS will be used.");
@ -208,11 +243,22 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
}
Logging.d(TAG, "HW AEC will not be used.");
}
final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, audioManager, audioSource,
audioFormat, audioRecordErrorCallback, audioRecordStateCallback, samplesReadyCallback,
useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(
context, audioManager, audioTrackErrorCallback, audioTrackStateCallback);
// Low-latency mode was introduced in API version 26, see
// https://developer.android.com/reference/android/media/AudioTrack#PERFORMANCE_MODE_LOW_LATENCY
final int MIN_LOW_LATENCY_SDK_VERSION = 26;
if (useLowLatency && Build.VERSION.SDK_INT >= MIN_LOW_LATENCY_SDK_VERSION) {
Logging.d(TAG, "Low latency mode will be used.");
}
ScheduledExecutorService executor = this.scheduler;
if (executor == null) {
executor = WebRtcAudioRecord.newDefaultScheduler();
}
final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager,
audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
final WebRtcAudioTrack audioOutput =
new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback,
audioTrackStateCallback, useLowLatency, enableVolumeLogger);
return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
}