Remove redundant initializers from WebRTC Java code.

Removes redundant field initializers such as null, 0 and false.

Bug: webrtc:9742
Change-Id: I1e54f6c6000885cf95f7af8e2701875a78445497
Reviewed-on: https://webrtc-review.googlesource.com/99481
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Reviewed-by: Artem Titov <titovartem@webrtc.org>
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24676}
This commit is contained in:
Sami Kalliomäki
2018-09-11 11:11:47 +02:00
committed by Commit Bot
parent ef73f59de6
commit 3d50a31aad
35 changed files with 118 additions and 124 deletions

View File

@@ -65,9 +65,9 @@ public class AppRTCAudioManager {
private AudioManagerEvents audioManagerEvents;
private AudioManagerState amState;
private int savedAudioMode = AudioManager.MODE_INVALID;
private boolean savedIsSpeakerPhoneOn = false;
private boolean savedIsMicrophoneMute = false;
private boolean hasWiredHeadset = false;
private boolean savedIsSpeakerPhoneOn;
private boolean savedIsMicrophoneMute;
private boolean hasWiredHeadset;
// Default audio device; speaker phone for video calls or earpiece for audio
// only calls.
@@ -93,8 +93,7 @@ public class AppRTCAudioManager {
// relative to the view screen of a device and can therefore be used to
// assist device switching (close to ear <=> use headset earpiece if
// available, far from ear <=> use speaker phone).
@Nullable
private AppRTCProximitySensor proximitySensor = null;
@Nullable private AppRTCProximitySensor proximitySensor;
// Handles all tasks related to Bluetooth headset devices.
private final AppRTCBluetoothManager bluetoothManager;

View File

@@ -16,8 +16,8 @@ import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Build;
import javax.annotation.Nullable;
import android.util.Log;
import javax.annotation.Nullable;
import org.appspot.apprtc.util.AppRTCUtils;
import org.webrtc.ThreadUtils;
@@ -40,9 +40,8 @@ public class AppRTCProximitySensor implements SensorEventListener {
private final Runnable onSensorStateListener;
private final SensorManager sensorManager;
@Nullable
private Sensor proximitySensor = null;
private boolean lastStateReportIsNear = false;
@Nullable private Sensor proximitySensor;
private boolean lastStateReportIsNear;
/** Construction */
static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {

View File

@@ -152,14 +152,12 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
private final ProxyVideoSink remoteProxyRenderer = new ProxyVideoSink();
private final ProxyVideoSink localProxyVideoSink = new ProxyVideoSink();
@Nullable
private PeerConnectionClient peerConnectionClient = null;
@Nullable private PeerConnectionClient peerConnectionClient;
@Nullable
private AppRTCClient appRtcClient;
@Nullable
private SignalingParameters signalingParameters;
@Nullable
private AppRTCAudioManager audioManager = null;
@Nullable private AppRTCAudioManager audioManager;
@Nullable
private SurfaceViewRenderer pipRenderer;
@Nullable
@@ -176,9 +174,9 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
private boolean iceConnected;
private boolean isError;
private boolean callControlFragmentVisible = true;
private long callStartedTimeMs = 0;
private long callStartedTimeMs;
private boolean micEnabled = true;
private boolean screencaptureEnabled = false;
private boolean screencaptureEnabled;
private static Intent mediaProjectionPermissionResultData;
private static int mediaProjectionPermissionResultCode;
// True if local view is in the fullscreen renderer.

View File

@@ -30,10 +30,10 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
private static final int FRAMERATE_THRESHOLD = 15;
private TextView captureFormatText;
private CallFragment.OnCallEvents callEvents;
private int width = 0;
private int height = 0;
private int framerate = 0;
private double targetBandwidth = 0;
private int width;
private int height;
private int framerate;
private double targetBandwidth;
public CaptureQualityController(
TextView captureFormatText, CallFragment.OnCallEvents callEvents) {

View File

@@ -45,7 +45,7 @@ public class ConnectActivity extends Activity {
private static final String TAG = "ConnectActivity";
private static final int CONNECTION_REQUEST = 1;
private static final int REMOVE_FAVORITE_INDEX = 0;
private static boolean commandLineRun = false;
private static boolean commandLineRun;
private ImageButton addFavoriteButton;
private EditText roomEditText;

View File

@@ -182,8 +182,7 @@ public class PeerConnectionClient {
private RtcEventLog rtcEventLog;
// Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes
// recorded audio samples to an output file.
@Nullable
private RecordedAudioToFileController saveRecordedAudioToFile = null;
@Nullable private RecordedAudioToFileController saveRecordedAudioToFile;
/**
* Peer connection parameters.

View File

@@ -12,14 +12,14 @@ package org.appspot.apprtc;
import android.media.AudioFormat;
import android.os.Environment;
import javax.annotation.Nullable;
import android.util.Log;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.concurrent.ExecutorService;
import javax.annotation.Nullable;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
import org.webrtc.voiceengine.WebRtcAudioRecord;
@@ -36,10 +36,9 @@ public class RecordedAudioToFileController
private final Object lock = new Object();
private final ExecutorService executor;
@Nullable
private OutputStream rawAudioFileOutputStream = null;
@Nullable private OutputStream rawAudioFileOutputStream;
private boolean isRunning;
private long fileSizeInBytes = 0;
private long fileSizeInBytes;
public RecordedAudioToFileController(ExecutorService executor) {
Log.d(TAG, "ctor");

View File

@@ -88,7 +88,7 @@ public class PeerConnectionClientTest implements PeerConnectionEvents {
private static class MockSink implements VideoSink {
// These are protected by 'this' since we gets called from worker threads.
private String rendererName;
private boolean renderFrameCalled = false;
private boolean renderFrameCalled;
// Thread-safe in itself.
private CountDownLatch doneRendering;

View File

@@ -41,20 +41,20 @@ public class WebRtcAudioEffects {
// Contains the available effect descriptors returned from the
// AudioEffect.getEffects() call. This result is cached to avoid doing the
// slow OS call multiple times.
private static @Nullable Descriptor[] cachedEffects = null;
private static @Nullable Descriptor[] cachedEffects;
// Contains the audio effect objects. Created in enable() and destroyed
// in release().
private @Nullable AcousticEchoCanceler aec = null;
private @Nullable NoiseSuppressor ns = null;
private @Nullable AcousticEchoCanceler aec;
private @Nullable NoiseSuppressor ns;
// Affects the final state given to the setEnabled() method on each effect.
// The default state is set to "disabled" but each effect can also be enabled
// by calling setAEC() and setNS().
// To enable an effect, both the shouldEnableXXX member and the static
// canUseXXX() must be true.
private boolean shouldEnableAec = false;
private boolean shouldEnableNs = false;
private boolean shouldEnableAec;
private boolean shouldEnableNs;
// Checks if the device implements Acoustic Echo Cancellation (AEC).
// Returns true if the device implements AEC, false otherwise.

View File

@@ -43,11 +43,11 @@ public class WebRtcAudioManager {
private static final boolean blacklistDeviceForAAudioUsage = true;
// Use mono as default for both audio directions.
private static boolean useStereoOutput = false;
private static boolean useStereoInput = false;
private static boolean useStereoOutput;
private static boolean useStereoInput;
private static boolean blacklistDeviceForOpenSLESUsage = false;
private static boolean blacklistDeviceForOpenSLESUsageIsOverridden = false;
private static boolean blacklistDeviceForOpenSLESUsage;
private static boolean blacklistDeviceForOpenSLESUsageIsOverridden;
// Call this method to override the default list of blacklisted devices
// specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS.
@@ -151,7 +151,7 @@ public class WebRtcAudioManager {
private final long nativeAudioManager;
private final AudioManager audioManager;
private boolean initialized = false;
private boolean initialized;
private int nativeSampleRate;
private int nativeChannels;

View File

@@ -52,14 +52,14 @@ public class WebRtcAudioRecord {
private final long nativeAudioRecord;
private @Nullable WebRtcAudioEffects effects = null;
private @Nullable WebRtcAudioEffects effects;
private ByteBuffer byteBuffer;
private @Nullable AudioRecord audioRecord = null;
private @Nullable AudioRecordThread audioThread = null;
private @Nullable AudioRecord audioRecord;
private @Nullable AudioRecordThread audioThread;
private static volatile boolean microphoneMute = false;
private static volatile boolean microphoneMute;
private byte[] emptyBytes;
// Audio recording error handler functions.
@@ -74,7 +74,7 @@ public class WebRtcAudioRecord {
void onWebRtcAudioRecordError(String errorMessage);
}
private static @Nullable WebRtcAudioRecordErrorCallback errorCallback = null;
private static @Nullable WebRtcAudioRecordErrorCallback errorCallback;
public static void setErrorCallback(WebRtcAudioRecordErrorCallback errorCallback) {
Logging.d(TAG, "Set error callback");
@@ -124,7 +124,7 @@ public class WebRtcAudioRecord {
void onWebRtcAudioRecordSamplesReady(AudioSamples samples);
}
private static @Nullable WebRtcAudioRecordSamplesReadyCallback audioSamplesReadyCallback = null;
private static @Nullable WebRtcAudioRecordSamplesReadyCallback audioSamplesReadyCallback;
public static void setOnAudioSamplesReady(WebRtcAudioRecordSamplesReadyCallback callback) {
audioSamplesReadyCallback = callback;

View File

@@ -80,12 +80,12 @@ public class WebRtcAudioTrack {
private ByteBuffer byteBuffer;
private @Nullable AudioTrack audioTrack = null;
private @Nullable AudioTrackThread audioThread = null;
private @Nullable AudioTrack audioTrack;
private @Nullable AudioTrackThread audioThread;
// Samples to be played are replaced by zeros if |speakerMute| is set to true.
// Can be used to ensure that the speaker is fully muted.
private static volatile boolean speakerMute = false;
private static volatile boolean speakerMute;
private byte[] emptyBytes;
// Audio playout/track error handler functions.
@@ -108,8 +108,8 @@ public class WebRtcAudioTrack {
void onWebRtcAudioTrackError(String errorMessage);
}
private static @Nullable WebRtcAudioTrackErrorCallback errorCallbackOld = null;
private static @Nullable ErrorCallback errorCallback = null;
private static @Nullable WebRtcAudioTrackErrorCallback errorCallbackOld;
private static @Nullable ErrorCallback errorCallback;
@Deprecated
public static void setErrorCallback(WebRtcAudioTrackErrorCallback errorCallback) {

View File

@@ -64,12 +64,12 @@ public final class WebRtcAudioUtils {
private static final int DEFAULT_SAMPLE_RATE_HZ = 16000;
private static int defaultSampleRateHz = DEFAULT_SAMPLE_RATE_HZ;
// Set to true if setDefaultSampleRateHz() has been called.
private static boolean isDefaultSampleRateOverridden = false;
private static boolean isDefaultSampleRateOverridden;
// By default, utilize hardware based audio effects for AEC and NS when
// available.
private static boolean useWebRtcBasedAcousticEchoCanceler = false;
private static boolean useWebRtcBasedNoiseSuppressor = false;
private static boolean useWebRtcBasedAcousticEchoCanceler;
private static boolean useWebRtcBasedNoiseSuppressor;
// Call these methods if any hardware based effect shall be replaced by a
// software based version provided by the WebRTC stack instead.

View File

@@ -22,7 +22,7 @@ public class DataChannel {
// Optional unsigned short in WebIDL, -1 means unspecified.
public int maxRetransmits = -1;
public String protocol = "";
public boolean negotiated = false;
public boolean negotiated;
// Optional unsigned short in WebIDL, -1 means unspecified.
public int id = -1;

View File

@@ -21,9 +21,9 @@ import android.os.SystemClock;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Queue;
@@ -156,9 +156,9 @@ public class MediaCodecVideoDecoder {
private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
// Active running decoder instance. Set in initDecode() (called from native code)
// and reset to null in release() call.
@Nullable private static MediaCodecVideoDecoder runningInstance = null;
@Nullable private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
private static int codecErrors = 0;
@Nullable private static MediaCodecVideoDecoder runningInstance;
@Nullable private static MediaCodecVideoDecoderErrorCallback errorCallback;
private static int codecErrors;
// List of disabled codec types - can be set from application.
private static Set<String> hwDecoderDisabledTypes = new HashSet<String>();
@Nullable private static EglBase eglBase;
@@ -228,7 +228,7 @@ public class MediaCodecVideoDecoder {
// The below variables are only used when decoding to a Surface.
@Nullable private TextureListener textureListener;
private int droppedFrames;
@Nullable private Surface surface = null;
@Nullable private Surface surface;
private final Queue<DecodedOutputBuffer> dequeuedSurfaceOutputBuffers =
new ArrayDeque<DecodedOutputBuffer>();

View File

@@ -23,8 +23,8 @@ import android.os.Bundle;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -164,9 +164,9 @@ public class MediaCodecVideoEncoder {
// Active running encoder instance. Set in initEncode() (called from native code)
// and reset to null in release() call.
@Nullable private static MediaCodecVideoEncoder runningInstance = null;
@Nullable private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
private static int codecErrors = 0;
@Nullable private static MediaCodecVideoEncoder runningInstance;
@Nullable private static MediaCodecVideoEncoderErrorCallback errorCallback;
private static int codecErrors;
// List of disabled codec types - can be set from application.
private static Set<String> hwEncoderDisabledTypes = new HashSet<String>();
@Nullable private static EglBase staticEglBase;
@@ -348,7 +348,7 @@ public class MediaCodecVideoEncoder {
private long lastKeyFrameMs;
// SPS and PPS NALs (Config frame) for H.264.
@Nullable private ByteBuffer configData = null;
@Nullable private ByteBuffer configData;
// MediaCodec error handler - invoked when critical error happens which may prevent
// further use of media codec API. Now it means that one of media codec instances

View File

@@ -526,7 +526,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
private final Observer observer;
// Network information about a WifiP2p (aka WiFi-Direct) network, or null if no such network is
// connected.
@Nullable private NetworkInformation wifiP2pNetworkInfo = null;
@Nullable private NetworkInformation wifiP2pNetworkInfo;
WifiDirectManagerDelegate(Observer observer, Context context) {
this.context = context;

View File

@@ -29,7 +29,7 @@ public class PeerConnectionFactory {
private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";
private final long nativeFactory;
private static volatile boolean internalTracerInitialized = false;
private static volatile boolean internalTracerInitialized;
@Nullable private static Thread networkThread;
@Nullable private static Thread workerThread;
@Nullable private static Thread signalingThread;
@@ -63,11 +63,11 @@ public class PeerConnectionFactory {
public static class Builder {
private final Context applicationContext;
private String fieldTrials = "";
private boolean enableInternalTracer = false;
private boolean enableInternalTracer;
private NativeLibraryLoader nativeLibraryLoader = new NativeLibrary.DefaultLoader();
private String nativeLibraryName = "jingle_peerconnection_so";
@Nullable private Loggable loggable = null;
@Nullable private Severity loggableSeverity = null;
@Nullable private Loggable loggable;
@Nullable private Severity loggableSeverity;
Builder(Context applicationContext) {
this.applicationContext = applicationContext;

View File

@@ -47,9 +47,9 @@ public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
@Nullable private VirtualDisplay virtualDisplay;
@Nullable private SurfaceTextureHelper surfaceTextureHelper;
@Nullable private CapturerObserver capturerObserver;
private long numCapturedFrames = 0;
private long numCapturedFrames;
@Nullable private MediaProjection mediaProjection;
private boolean isDisposed = false;
private boolean isDisposed;
@Nullable private MediaProjectionManager mediaProjectionManager;
/**

View File

@@ -28,7 +28,7 @@ public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Cal
private RendererCommon.RendererEvents rendererEvents;
private final Object layoutLock = new Object();
private boolean isRenderingPaused = false;
private boolean isRenderingPaused;
private boolean isFirstFrameRendered;
private int rotatedFrameWidth;
private int rotatedFrameHeight;

View File

@@ -71,9 +71,9 @@ public class SurfaceTextureHelper {
// These variables are only accessed from the |handler| thread.
@Nullable private VideoSink listener;
// The possible states of this class.
private boolean hasPendingTexture = false;
private volatile boolean isTextureInUse = false;
private boolean isQuitting = false;
private boolean hasPendingTexture;
private volatile boolean isTextureInUse;
private boolean isQuitting;
private int frameRotation;
private int textureWidth;
private int textureHeight;

View File

@@ -39,9 +39,9 @@ class CameraVideoCapturerTestFixtures {
static private class RendererCallbacks implements VideoSink {
private final Object frameLock = new Object();
private int framesRendered = 0;
private int width = 0;
private int height = 0;
private int framesRendered;
private int width;
private int height;
@Override
public void onFrame(VideoFrame frame) {
@@ -102,7 +102,7 @@ class CameraVideoCapturerTestFixtures {
}
static private class FakeCapturerObserver implements CapturerObserver {
private int framesCaptured = 0;
private int framesCaptured;
private @Nullable VideoFrame videoFrame;
final private Object frameLock = new Object();
final private Object capturerStartLock = new Object();

View File

@@ -272,7 +272,7 @@ public class HardwareVideoEncoderTest {
// # Test fields
private final Object referencedFramesLock = new Object();
private int referencedFrames = 0;
private int referencedFrames;
private Runnable releaseFrameCallback = new Runnable() {
@Override

View File

@@ -58,7 +58,7 @@ public class NetworkMonitorTest {
* Listens for alerts fired by the NetworkMonitor when network status changes.
*/
private static class NetworkMonitorTestObserver implements NetworkMonitor.NetworkObserver {
private boolean receivedNotification = false;
private boolean receivedNotification;
@Override
public void onConnectionTypeChanged(ConnectionType connectionType) {
@@ -159,7 +159,7 @@ public class NetworkMonitorTest {
}
private static final Object lock = new Object();
private static @Nullable Handler uiThreadHandler = null;
private static @Nullable Handler uiThreadHandler;
private NetworkMonitorAutoDetect receiver;
private MockConnectivityManagerDelegate connectivityDelegate;

View File

@@ -50,7 +50,7 @@ import org.webrtc.PeerConnection.SignalingState;
@RunWith(BaseJUnit4ClassRunner.class)
public class PeerConnectionTest {
private static final int TIMEOUT_SECONDS = 20;
private @Nullable TreeSet<String> threadsBeforeTest = null;
private @Nullable TreeSet<String> threadsBeforeTest;
@Before
public void setUp() {
@@ -64,13 +64,13 @@ public class PeerConnectionTest {
implements PeerConnection.Observer, VideoSink, DataChannel.Observer, StatsObserver,
RTCStatsCollectorCallback, RtpReceiver.Observer {
private final String name;
private int expectedIceCandidates = 0;
private int expectedErrors = 0;
private int expectedRenegotiations = 0;
private int expectedWidth = 0;
private int expectedHeight = 0;
private int expectedFramesDelivered = 0;
private int expectedTracksAdded = 0;
private int expectedIceCandidates;
private int expectedErrors;
private int expectedRenegotiations;
private int expectedWidth;
private int expectedHeight;
private int expectedFramesDelivered;
private int expectedTracksAdded;
private Queue<SignalingState> expectedSignalingChanges = new ArrayDeque<>();
private Queue<IceConnectionState> expectedIceConnectionChanges = new ArrayDeque<>();
private Queue<IceGatheringState> expectedIceGatheringChanges = new ArrayDeque<>();
@@ -82,12 +82,12 @@ public class PeerConnectionTest {
private Queue<DataChannel.Buffer> expectedBuffers = new ArrayDeque<>();
private Queue<DataChannel.State> expectedStateChanges = new ArrayDeque<>();
private Queue<String> expectedRemoteDataChannelLabels = new ArrayDeque<>();
private int expectedOldStatsCallbacks = 0;
private int expectedNewStatsCallbacks = 0;
private int expectedOldStatsCallbacks;
private int expectedNewStatsCallbacks;
private List<StatsReport[]> gotStatsReports = new ArrayList<>();
private final HashSet<MediaStream> gotRemoteStreams = new HashSet<>();
private int expectedFirstAudioPacket = 0;
private int expectedFirstVideoPacket = 0;
private int expectedFirstAudioPacket;
private int expectedFirstVideoPacket;
public ObserverExpectations(String name) {
this.name = name;
@@ -532,9 +532,9 @@ public class PeerConnectionTest {
}
private static class SdpObserverLatch implements SdpObserver {
private boolean success = false;
private @Nullable SessionDescription sdp = null;
private @Nullable String error = null;
private boolean success;
private @Nullable SessionDescription sdp;
private @Nullable String error;
private CountDownLatch latch = new CountDownLatch(1);
public SdpObserverLatch() {}
@@ -1444,7 +1444,7 @@ public class PeerConnectionTest {
final VideoTrack videoTrack = factory.createVideoTrack("video", videoSource);
class FrameCounter implements VideoSink {
private int count = 0;
private int count;
public int getCount() {
return count;

View File

@@ -85,8 +85,8 @@ class AndroidVideoDecoder implements VideoDecoder, VideoSink {
// caller and must be used to call initDecode, decode, and release.
private ThreadChecker decoderThreadChecker;
private volatile boolean running = false;
@Nullable private volatile Exception shutdownException = null;
private volatile boolean running;
@Nullable private volatile Exception shutdownException;
// Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decode thread
// or the output thread. Accesses should be protected with this lock.
@@ -107,7 +107,7 @@ class AndroidVideoDecoder implements VideoDecoder, VideoSink {
private final @Nullable EglBase.Context sharedContext;
// Valid and immutable while the decoder is running.
@Nullable private SurfaceTextureHelper surfaceTextureHelper;
@Nullable private Surface surface = null;
@Nullable private Surface surface;
private static class DecodedTextureMetadata {
final long presentationTimestampUs;
@@ -128,7 +128,7 @@ class AndroidVideoDecoder implements VideoDecoder, VideoSink {
@Nullable private Callback callback;
// Valid and immutable while the decoder is running.
@Nullable private MediaCodecWrapper codec = null;
@Nullable private MediaCodecWrapper codec;
AndroidVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
VideoCodecType codecType, int colorFormat, @Nullable EglBase.Context sharedContext) {

View File

@@ -12,8 +12,8 @@ package org.webrtc;
/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
class BaseBitrateAdjuster implements BitrateAdjuster {
protected int targetBitrateBps = 0;
protected int targetFps = 0;
protected int targetBitrateBps;
protected int targetFps;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {

View File

@@ -49,7 +49,7 @@ class Camera1Session implements CameraSession {
private final long constructionTimeNs; // Construction time of this class.
private SessionState state;
private boolean firstFrameReported = false;
private boolean firstFrameReported;
// TODO(titovartem) make correct fix during webrtc:9175
@SuppressWarnings("ByteBufferBackingArray")

View File

@@ -71,7 +71,7 @@ class Camera2Session implements CameraSession {
// State
private SessionState state = SessionState.RUNNING;
private boolean firstFrameReported = false;
private boolean firstFrameReported;
// Used only for stats. Only used on the camera thread.
private final long constructionTimeNs; // Construction time of this class.

View File

@@ -26,9 +26,9 @@ class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
private static final double BITS_PER_BYTE = 8.0;
// How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
private double deviationBytes = 0;
private double timeSinceLastAdjustmentMs = 0;
private int bitrateAdjustmentScaleExp = 0;
private double deviationBytes;
private double timeSinceLastAdjustmentMs;
private int bitrateAdjustmentScaleExp;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {

View File

@@ -104,15 +104,15 @@ class HardwareVideoEncoder implements VideoEncoder {
// --- Only accessed on the output thread.
// Contents of the last observed config frame output by the MediaCodec. Used by H.264.
@Nullable private ByteBuffer configBuffer = null;
@Nullable private ByteBuffer configBuffer;
private int adjustedBitrate;
// Whether the encoder is running. Volatile so that the output thread can watch this value and
// exit when the encoder stops.
private volatile boolean running = false;
private volatile boolean running;
// Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
// value to send exceptions thrown during release back to the encoder thread.
@Nullable private volatile Exception shutdownException = null;
@Nullable private volatile Exception shutdownException;
/**
* Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame

View File

@@ -28,7 +28,7 @@ class NativeLibrary {
}
private static Object lock = new Object();
private static boolean libraryLoaded = false;
private static boolean libraryLoaded;
/**
* Loads the native library. Clients should call PeerConnectionFactory.initialize. It will call

View File

@@ -41,18 +41,18 @@ class WebRtcAudioEffects {
// Contains the available effect descriptors returned from the
// AudioEffect.getEffects() call. This result is cached to avoid doing the
// slow OS call multiple times.
private static @Nullable Descriptor[] cachedEffects = null;
private static @Nullable Descriptor[] cachedEffects;
// Contains the audio effect objects. Created in enable() and destroyed
// in release().
private @Nullable AcousticEchoCanceler aec = null;
private @Nullable NoiseSuppressor ns = null;
private @Nullable AcousticEchoCanceler aec;
private @Nullable NoiseSuppressor ns;
// Affects the final state given to the setEnabled() method on each effect.
// The default state is set to "disabled" but each effect can also be enabled
// by calling setAEC() and setNS().
private boolean shouldEnableAec = false;
private boolean shouldEnableNs = false;
private boolean shouldEnableAec;
private boolean shouldEnableNs;
// Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
// fulfilled.

View File

@@ -63,10 +63,10 @@ class WebRtcAudioRecord {
private @Nullable ByteBuffer byteBuffer;
private @Nullable AudioRecord audioRecord = null;
private @Nullable AudioRecordThread audioThread = null;
private @Nullable AudioRecord audioRecord;
private @Nullable AudioRecordThread audioThread;
private volatile boolean microphoneMute = false;
private volatile boolean microphoneMute;
private byte[] emptyBytes;
private final @Nullable AudioRecordErrorCallback errorCallback;

View File

@@ -21,11 +21,11 @@ import android.os.Process;
import java.lang.Thread;
import java.nio.ByteBuffer;
import javax.annotation.Nullable;
import org.webrtc.CalledByNative;
import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode;
import org.webrtc.CalledByNative;
class WebRtcAudioTrack {
private static final String TAG = "WebRtcAudioTrackExternal";
@@ -69,13 +69,13 @@ class WebRtcAudioTrack {
private ByteBuffer byteBuffer;
private @Nullable AudioTrack audioTrack = null;
private @Nullable AudioTrackThread audioThread = null;
private @Nullable AudioTrack audioTrack;
private @Nullable AudioTrackThread audioThread;
private final VolumeLogger volumeLogger;
// Samples to be played are replaced by zeros if |speakerMute| is set to true.
// Can be used to ensure that the speaker is fully muted.
private volatile boolean speakerMute = false;
private volatile boolean speakerMute;
private byte[] emptyBytes;
private final @Nullable AudioTrackErrorCallback errorCallback;