Annotate libjingle_peerconnection_java with @Nullable.

Bug: webrtc:8881
Change-Id: Ida2ef6c003567d19529c21629c916ed40e8de3a6
Reviewed-on: https://webrtc-review.googlesource.com/63380
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Paulina Hensman <phensman@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22563}
Author: Sami Kalliomäki
Date: 2018-03-22 13:32:44 +01:00
Committed by: Commit Bot
Parent: 12d6a49e97
Commit: e7592d8d5f
47 changed files with 277 additions and 170 deletions
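
The pattern applied throughout the change is the javax.annotation.Nullable annotation on fields that are only set while a session or codec is running, on parameters that callers may legitimately pass as null, and on methods that may return null. A minimal sketch of that usage, with illustrative names only (ExampleSession and its members are not part of this change):

import javax.annotation.Nullable;

class ExampleSession {
  // Null until start() has created the thread.
  @Nullable private Thread outputThread;

  // Optional collaborator; null simply means the feature is unused.
  @Nullable private final Runnable mediaRecorderHook;

  ExampleSession(@Nullable Runnable mediaRecorderHook) {
    this.mediaRecorderHook = mediaRecorderHook;
  }

  void start() {
    outputThread = new Thread(() -> {
      if (mediaRecorderHook != null) {
        mediaRecorderHook.run();
      }
    });
    outputThread.start();
  }
}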

View File

@@ -14,6 +14,7 @@ import android.content.Context;
 import android.media.MediaRecorder;
 import android.os.Handler;
 import android.os.SystemClock;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import android.view.WindowManager;
 import java.io.IOException;
@@ -152,8 +153,8 @@ class Camera1Session implements CameraSession {
 }
 private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
-SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, int cameraId,
-android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
+SurfaceTextureHelper surfaceTextureHelper, @Nullable MediaRecorder mediaRecorder,
+int cameraId, android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
 CaptureFormat captureFormat, long constructionTimeNs) {
 Logging.d(TAG, "Create new camera1 session on camera " + cameraId);

View File

@@ -24,6 +24,7 @@ import android.hardware.camera2.CaptureFailure;
 import android.hardware.camera2.CaptureRequest;
 import android.media.MediaRecorder;
 import android.os.Handler;
+import javax.annotation.Nullable;
 import android.util.Range;
 import android.view.Surface;
 import android.view.WindowManager;
@@ -51,7 +52,7 @@ class Camera2Session implements CameraSession {
 private final Context applicationContext;
 private final CameraManager cameraManager;
 private final SurfaceTextureHelper surfaceTextureHelper;
-private final Surface mediaRecorderSurface;
+@Nullable private final Surface mediaRecorderSurface;
 private final String cameraId;
 private final int width;
 private final int height;
@@ -65,11 +66,11 @@ class Camera2Session implements CameraSession {
 private CaptureFormat captureFormat;
 // Initialized when camera opens
-private CameraDevice cameraDevice;
-private Surface surface;
+@Nullable private CameraDevice cameraDevice;
+@Nullable private Surface surface;
 // Initialized when capture session is created
-private CameraCaptureSession captureSession;
+@Nullable private CameraCaptureSession captureSession;
 // State
 private SessionState state = SessionState.RUNNING;
@@ -304,7 +305,8 @@ class Camera2Session implements CameraSession {
 private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
 CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
-MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
+@Nullable MediaRecorder mediaRecorder, String cameraId, int width, int height,
+int framerate) {
 Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
 constructionTimeNs = System.nanoTime();

View File

@@ -14,6 +14,7 @@ import android.content.Context;
 import android.media.MediaRecorder;
 import android.os.Handler;
 import android.os.Looper;
+import javax.annotation.Nullable;
 import java.util.Arrays;
 @SuppressWarnings("deprecation")
@@ -37,9 +38,10 @@ abstract class CameraCapturer implements CameraVideoCapturer {
 private final static int OPEN_CAMERA_TIMEOUT = 10000;
 private final CameraEnumerator cameraEnumerator;
-private final CameraEventsHandler eventsHandler;
+@Nullable private final CameraEventsHandler eventsHandler;
 private final Handler uiThreadHandler;
+@Nullable
 private final CameraSession.CreateSessionCallback createSessionCallback =
 new CameraSession.CreateSessionCallback() {
 @Override
@@ -125,6 +127,7 @@ abstract class CameraCapturer implements CameraVideoCapturer {
 }
 };
+@Nullable
 private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
 @Override
 public void onCameraOpening() {
@@ -203,31 +206,31 @@ abstract class CameraCapturer implements CameraVideoCapturer {
 // Initialized on initialize
 // -------------------------
-private Handler cameraThreadHandler;
+@Nullable private Handler cameraThreadHandler;
 private Context applicationContext;
 private CapturerObserver capturerObserver;
-private SurfaceTextureHelper surfaceHelper;
+@Nullable private SurfaceTextureHelper surfaceHelper;
 private final Object stateLock = new Object();
 private boolean sessionOpening; /* guarded by stateLock */
-private CameraSession currentSession; /* guarded by stateLock */
+@Nullable private CameraSession currentSession; /* guarded by stateLock */
 private String cameraName; /* guarded by stateLock */
 private int width; /* guarded by stateLock */
 private int height; /* guarded by stateLock */
 private int framerate; /* guarded by stateLock */
 private int openAttemptsRemaining; /* guarded by stateLock */
 private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
-private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
+@Nullable private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
 // Valid from onDone call until stopCapture, otherwise null.
-private CameraStatistics cameraStatistics; /* guarded by stateLock */
+@Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */
 private boolean firstFrameObserved; /* guarded by stateLock */
 // Variables used on camera thread - do not require stateLock synchronization.
 private MediaRecorderState mediaRecorderState = MediaRecorderState.IDLE;
-private MediaRecorderHandler mediaRecorderEventsHandler;
+@Nullable private MediaRecorderHandler mediaRecorderEventsHandler;
-public CameraCapturer(
-String cameraName, CameraEventsHandler eventsHandler, CameraEnumerator cameraEnumerator) {
+public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler,
+CameraEnumerator cameraEnumerator) {
 if (eventsHandler == null) {
 eventsHandler = new CameraEventsHandler() {
 @Override
@@ -262,8 +265,8 @@ abstract class CameraCapturer implements CameraVideoCapturer {
 }
 @Override
-public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
-CapturerObserver capturerObserver) {
+public void initialize(@Nullable SurfaceTextureHelper surfaceTextureHelper,
+Context applicationContext, CapturerObserver capturerObserver) {
 this.applicationContext = applicationContext;
 this.capturerObserver = capturerObserver;
 this.surfaceHelper = surfaceTextureHelper;
@@ -412,14 +415,15 @@ abstract class CameraCapturer implements CameraVideoCapturer {
 }
 }
-private void reportCameraSwitchError(String error, CameraSwitchHandler switchEventsHandler) {
+private void reportCameraSwitchError(
+String error, @Nullable CameraSwitchHandler switchEventsHandler) {
 Logging.e(TAG, error);
 if (switchEventsHandler != null) {
 switchEventsHandler.onCameraSwitchError(error);
 }
 }
-private void switchCameraInternal(final CameraSwitchHandler switchEventsHandler) {
+private void switchCameraInternal(@Nullable final CameraSwitchHandler switchEventsHandler) {
 Logging.d(TAG, "switchCamera internal");
 final String[] deviceNames = cameraEnumerator.getDeviceNames();
@@ -476,7 +480,7 @@ abstract class CameraCapturer implements CameraVideoCapturer {
 }
 private void reportUpdateMediaRecorderError(
-String error, MediaRecorderHandler mediaRecoderEventsHandler) {
+String error, @Nullable MediaRecorderHandler mediaRecoderEventsHandler) {
 checkIsOnCameraThread();
 Logging.e(TAG, error);
 if (mediaRecoderEventsHandler != null) {
@@ -485,7 +489,7 @@ abstract class CameraCapturer implements CameraVideoCapturer {
 }
 private void updateMediaRecorderInternal(
-MediaRecorder mediaRecorder, MediaRecorderHandler mediaRecoderEventsHandler) {
+@Nullable MediaRecorder mediaRecorder, MediaRecorderHandler mediaRecoderEventsHandler) {
 checkIsOnCameraThread();
 boolean addMediaRecorder = (mediaRecorder != null);
 Logging.d(TAG,
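
The CameraCapturer constructor above also shows the companion convention: a @Nullable parameter is checked once and, if null, replaced with a no-op implementation so the rest of the class can call it without further checks. A hedged sketch of that shape, using stand-in names rather than the real WebRTC interfaces:

import javax.annotation.Nullable;

class CapturerSketch {
  interface CameraEventsCallback {
    void onCameraError(String errorDescription);
  }

  private final CameraEventsCallback eventsHandler;

  CapturerSketch(@Nullable CameraEventsCallback eventsHandler) {
    // A null handler is legal; substitute a do-nothing implementation once here.
    if (eventsHandler == null) {
      eventsHandler = new CameraEventsCallback() {
        @Override
        public void onCameraError(String errorDescription) {}
      };
    }
    this.eventsHandler = eventsHandler;
  }

  void reportError(String errorDescription) {
    // No null check needed; the constructor guaranteed a non-null handler.
    eventsHandler.onCameraError(errorDescription);
  }
}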

View File

@@ -13,6 +13,7 @@ package org.webrtc;
 import android.graphics.Canvas;
 import android.graphics.Rect;
 import android.graphics.SurfaceTexture;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import android.view.SurfaceHolder;
 import javax.microedition.khronos.egl.EGL10;
@@ -31,7 +32,7 @@ class EglBase10 implements EglBase {
 private final EGL10 egl;
 private EGLContext eglContext;
-private EGLConfig eglConfig;
+@Nullable private EGLConfig eglConfig;
 private EGLDisplay eglDisplay;
 private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
@@ -102,11 +103,13 @@ class EglBase10 implements EglBase {
 @Override
 public void setKeepScreenOn(boolean b) {}
+@Nullable
 @Override
 public Canvas lockCanvas() {
 return null;
 }
+@Nullable
 @Override
 public Canvas lockCanvas(Rect rect) {
 return null;
@@ -115,6 +118,7 @@ class EglBase10 implements EglBase {
 @Override
 public void unlockCanvasAndPost(Canvas canvas) {}
+@Nullable
 @Override
 public Rect getSurfaceFrame() {
 return null;
@@ -301,7 +305,7 @@ class EglBase10 implements EglBase {
 // Return an EGLConfig, or die trying.
 private EGLContext createEglContext(
-Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+@Nullable Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
 if (sharedContext != null && sharedContext.eglContext == EGL10.EGL_NO_CONTEXT) {
 throw new RuntimeException("Invalid sharedContext");
 }

View File

@@ -19,6 +19,7 @@ import android.opengl.EGLDisplay;
 import android.opengl.EGLExt;
 import android.opengl.EGLSurface;
 import android.os.Build;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import org.webrtc.EglBase;
@@ -33,7 +34,7 @@ class EglBase14 implements EglBase {
 private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
 private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
 private EGLContext eglContext;
-private EGLConfig eglConfig;
+@Nullable private EGLConfig eglConfig;
 private EGLDisplay eglDisplay;
 private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
@@ -262,7 +263,7 @@ class EglBase14 implements EglBase {
 // Return an EGLConfig, or die trying.
 private static EGLContext createEglContext(
-EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+@Nullable EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
 if (sharedContext != null && sharedContext.egl14Context == EGL14.EGL_NO_CONTEXT) {
 throw new RuntimeException("Invalid sharedContext");
 }

View File

@@ -15,6 +15,7 @@ import android.media.MediaCodec;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaFormat;
 import android.os.SystemClock;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -71,7 +72,7 @@ class HardwareVideoDecoder
 // Output thread runs a loop which polls MediaCodec for decoded output buffers. It reformats
 // those buffers into VideoFrames and delivers them to the callback. Variable is set on decoder
 // thread and is immutable while the codec is running.
-private Thread outputThread;
+@Nullable private Thread outputThread;
 // Checker that ensures work is run on the output thread.
 private ThreadChecker outputThreadChecker;
@@ -81,7 +82,7 @@ class HardwareVideoDecoder
 private ThreadChecker decoderThreadChecker;
 private volatile boolean running = false;
-private volatile Exception shutdownException = null;
+@Nullable private volatile Exception shutdownException = null;
 // Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decode thread
 // or the output thread. Accesses should be protected with this lock.
@@ -101,8 +102,8 @@ class HardwareVideoDecoder
 private final EglBase.Context sharedContext;
 // Valid and immutable while the decoder is running.
-private SurfaceTextureHelper surfaceTextureHelper;
-private Surface surface = null;
+@Nullable private SurfaceTextureHelper surfaceTextureHelper;
+@Nullable private Surface surface = null;
 private static class DecodedTextureMetadata {
 final int width;
@@ -123,14 +124,14 @@ class HardwareVideoDecoder
 // Metadata for the last frame rendered to the texture.
 private final Object renderedTextureMetadataLock = new Object();
-private DecodedTextureMetadata renderedTextureMetadata;
+@Nullable private DecodedTextureMetadata renderedTextureMetadata;
 // Decoding proceeds asynchronously. This callback returns decoded frames to the caller. Valid
 // and immutable while the decoder is running.
-private Callback callback;
+@Nullable private Callback callback;
 // Valid and immutable while the decoder is running.
-private MediaCodec codec = null;
+@Nullable private MediaCodec codec = null;
 HardwareVideoDecoder(
 String codecName, VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) {

View File

@@ -17,6 +17,7 @@ import android.media.MediaCodecInfo;
 import android.media.MediaFormat;
 import android.opengl.GLES20;
 import android.os.Bundle;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -81,16 +82,16 @@ class HardwareVideoEncoder implements VideoEncoder {
 private boolean automaticResizeOn;
 // --- Valid and immutable while an encoding session is running.
-private MediaCodec codec;
+@Nullable private MediaCodec codec;
 // Thread that delivers encoded frames to the user callback.
-private Thread outputThread;
+@Nullable private Thread outputThread;
 // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
 // input surface. Making this base current allows textures from the context to be drawn onto the
 // surface.
-private EglBase14 textureEglBase;
+@Nullable private EglBase14 textureEglBase;
 // Input surface for the codec. The encoder will draw input textures onto this surface.
-private Surface textureInputSurface;
+@Nullable private Surface textureInputSurface;
 private int width;
 private int height;
@@ -102,7 +103,7 @@ class HardwareVideoEncoder implements VideoEncoder {
 // --- Only accessed on the output thread.
 // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
-private ByteBuffer configBuffer = null;
+@Nullable private ByteBuffer configBuffer = null;
 private int adjustedBitrate;
 // Whether the encoder is running. Volatile so that the output thread can watch this value and
@@ -110,7 +111,7 @@ class HardwareVideoEncoder implements VideoEncoder {
 private volatile boolean running = false;
 // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
 // value to send exceptions thrown during release back to the encoder thread.
-private volatile Exception shutdownException = null;
+@Nullable private volatile Exception shutdownException = null;
 /**
 * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame

View File

@@ -14,6 +14,7 @@ import android.annotation.TargetApi;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
+import javax.annotation.Nullable;
 /** Container class for static constants and helpers used with MediaCodec. */
 @TargetApi(18)
@@ -54,7 +55,8 @@ class MediaCodecUtils {
 // Color formats supported by texture mode encoding - in order of preference.
 static final int[] TEXTURE_COLOR_FORMATS = {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
-static Integer selectColorFormat(int[] supportedColorFormats, CodecCapabilities capabilities) {
+static @Nullable Integer selectColorFormat(
+int[] supportedColorFormats, CodecCapabilities capabilities) {
 for (int supportedColorFormat : supportedColorFormats) {
 for (int codecColorFormat : capabilities.colorFormats) {
 if (codecColorFormat == supportedColorFormat) {
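
In MediaCodecUtils above, the annotation sits on the return type: selectColorFormat returns an Integer or null when the codec shares no color format with the caller, and @Nullable tells callers to check before auto-unboxing. An illustrative caller under that assumption (simplified signature, not code from this change):

import javax.annotation.Nullable;

class ColorFormatPicker {
  // Returns the first format both sides support, or null if there is none.
  static @Nullable Integer selectColorFormat(int[] supportedColorFormats, int[] codecColorFormats) {
    for (int supported : supportedColorFormats) {
      for (int offered : codecColorFormats) {
        if (offered == supported) {
          return offered;
        }
      }
    }
    return null;
  }

  // Example caller: checks for null before unboxing, falling back to -1.
  static int pickOrFallback(int[] supportedColorFormats, int[] codecColorFormats) {
    Integer format = selectColorFormat(supportedColorFormats, codecColorFormats);
    if (format == null) {
      return -1; // Unboxing null here would throw NullPointerException.
    }
    return format;
  }
}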

View File

@@ -11,6 +11,7 @@
 package org.webrtc;
 // Explicit imports necessary for JNI generation.
+import javax.annotation.Nullable;
 import org.webrtc.VideoEncoder;
 import java.nio.ByteBuffer;
@@ -23,11 +24,13 @@ class VideoEncoderWrapper {
 return scalingSettings.on;
 }
+@Nullable
 @CalledByNative
 static Integer getScalingSettingsLow(VideoEncoder.ScalingSettings scalingSettings) {
 return scalingSettings.low;
 }
+@Nullable
 @CalledByNative
 static Integer getScalingSettingsHigh(VideoEncoder.ScalingSettings scalingSettings) {
 return scalingSettings.high;

View File

@@ -19,6 +19,7 @@ import android.media.audiofx.NoiseSuppressor;
 import android.os.Build;
 import java.util.List;
 import java.util.UUID;
+import javax.annotation.Nullable;
 import org.webrtc.Logging;
 // This class wraps control of three different platform effects. Supported
@@ -40,12 +41,12 @@ class WebRtcAudioEffects {
 // Contains the available effect descriptors returned from the
 // AudioEffect.getEffects() call. This result is cached to avoid doing the
 // slow OS call multiple times.
-private static Descriptor[] cachedEffects = null;
+private static @Nullable Descriptor[] cachedEffects = null;
 // Contains the audio effect objects. Created in enable() and destroyed
 // in release().
-private AcousticEchoCanceler aec = null;
-private NoiseSuppressor ns = null;
+private @Nullable AcousticEchoCanceler aec = null;
+private @Nullable NoiseSuppressor ns = null;
 // Affects the final state given to the setEnabled() method on each effect.
 // The default state is set to "disabled" but each effect can also be enabled
@@ -293,7 +294,7 @@ class WebRtcAudioEffects {
 // Returns the cached copy of the audio effects array, if available, or
 // queries the operating system for the list of effects.
-private static Descriptor[] getAvailableEffects() {
+private static @Nullable Descriptor[] getAvailableEffects() {
 if (cachedEffects != null) {
 return cachedEffects;
 }

View File

@@ -20,6 +20,7 @@ import android.media.AudioTrack;
 import android.os.Build;
 import java.util.Timer;
 import java.util.TimerTask;
+import javax.annotation.Nullable;
 import org.webrtc.Logging;
 import org.webrtc.CalledByNative;
@@ -100,7 +101,7 @@ class WebRtcAudioManager {
 private static final int TIMER_PERIOD_IN_SECONDS = 30;
 private final AudioManager audioManager;
-private Timer timer;
+private @Nullable Timer timer;
 public VolumeLogger(AudioManager audioManager) {
 this.audioManager = audioManager;

View File

@@ -19,12 +19,13 @@ import java.lang.System;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.concurrent.TimeUnit;
+import javax.annotation.Nullable;
+import org.webrtc.CalledByNative;
 import org.webrtc.Logging;
+import org.webrtc.NativeClassQualifiedName;
 import org.webrtc.ThreadUtils;
 import org.webrtc.audio.AudioDeviceModule.AudioRecordErrorCallback;
 import org.webrtc.audio.AudioDeviceModule.AudioRecordStartErrorCode;
-import org.webrtc.CalledByNative;
-import org.webrtc.NativeClassQualifiedName;
 import org.webrtc.audio.AudioDeviceModule.SamplesReadyCallback;
 class WebRtcAudioRecord {
@@ -56,24 +57,24 @@ class WebRtcAudioRecord {
 private final long nativeAudioRecord;
-private WebRtcAudioEffects effects = null;
+private @Nullable WebRtcAudioEffects effects = null;
-private ByteBuffer byteBuffer;
+private @Nullable ByteBuffer byteBuffer;
-private AudioRecord audioRecord = null;
-private AudioRecordThread audioThread = null;
+private @Nullable AudioRecord audioRecord = null;
+private @Nullable AudioRecordThread audioThread = null;
 private static volatile boolean microphoneMute = false;
 private byte[] emptyBytes;
-private static AudioRecordErrorCallback errorCallback = null;
+private static @Nullable AudioRecordErrorCallback errorCallback = null;
 public static void setErrorCallback(AudioRecordErrorCallback errorCallback) {
 Logging.d(TAG, "Set error callback");
 WebRtcAudioRecord.errorCallback = errorCallback;
 }
-private static SamplesReadyCallback audioSamplesReadyCallback = null;
+private static @Nullable SamplesReadyCallback audioSamplesReadyCallback = null;
 public static void setOnAudioSamplesReady(SamplesReadyCallback callback) {
 audioSamplesReadyCallback = callback;

View File

@@ -20,6 +20,7 @@ import android.media.AudioTrack;
 import android.os.Process;
 import java.lang.Thread;
 import java.nio.ByteBuffer;
+import javax.annotation.Nullable;
 import org.webrtc.ContextUtils;
 import org.webrtc.Logging;
 import org.webrtc.ThreadUtils;
@@ -82,15 +83,15 @@ class WebRtcAudioTrack {
 private ByteBuffer byteBuffer;
-private AudioTrack audioTrack = null;
-private AudioTrackThread audioThread = null;
+private @Nullable AudioTrack audioTrack = null;
+private @Nullable AudioTrackThread audioThread = null;
 // Samples to be played are replaced by zeros if |speakerMute| is set to true.
 // Can be used to ensure that the speaker is fully muted.
 private static volatile boolean speakerMute = false;
 private byte[] emptyBytes;
-private static AudioTrackErrorCallback errorCallback = null;
+private static @Nullable AudioTrackErrorCallback errorCallback = null;
 public static void setErrorCallback(AudioTrackErrorCallback errorCallback) {
 Logging.d(TAG, "Set extended error callback");