From 6c966eaf17184c9b134b003ad062c5be69a2214f Mon Sep 17 00:00:00 2001
From: Paulina Hensman
Date: Thu, 4 Oct 2018 16:54:13 +0200
Subject: [PATCH] Remove @SuppressLint(NewApi) and guard @TargetApi methods
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Also remove runningOnLollipopOrHigher() etc. in WebRtcAudioUtils in
favor of inline Build.VERSION.SDK_INT checks, since mapping API levels
to Android release names is error-prone.

Bug: webrtc:9818
Change-Id: I4a71de72e3891ca2b6fc2341db9131bb2db4cce7
Reviewed-on: https://webrtc-review.googlesource.com/c/103820
Reviewed-by: Sami Kalliomäki
Reviewed-by: Henrik Andreasson
Commit-Queue: Paulina Hensman
Cr-Commit-Position: refs/heads/master@{#25009}
---
 .../voiceengine/WebRtcAudioEffects.java       | 16 ++---
 .../voiceengine/WebRtcAudioManager.java       | 24 +++----
 .../webrtc/voiceengine/WebRtcAudioRecord.java |  5 +-
 .../webrtc/voiceengine/WebRtcAudioTrack.java  | 61 ++++++++----------
 .../webrtc/voiceengine/WebRtcAudioUtils.java  | 62 ++++---------------
 .../org/webrtc/audio/WebRtcAudioEffects.java  | 11 ++--
 .../org/webrtc/audio/WebRtcAudioManager.java  | 17 +++--
 .../org/webrtc/audio/WebRtcAudioRecord.java   |  5 +-
 .../org/webrtc/audio/WebRtcAudioTrack.java    | 56 +++++++----------
 .../org/webrtc/audio/WebRtcAudioUtils.java    | 62 ++++---------------
 10 files changed, 110 insertions(+), 209 deletions(-)

diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
index 447927de7b..7d09bf865e 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
@@ -10,7 +10,6 @@

 package org.webrtc.voiceengine;

-import android.annotation.TargetApi;
 import android.media.audiofx.AcousticEchoCanceler;
 import android.media.audiofx.AudioEffect;
 import android.media.audiofx.AudioEffect.Descriptor;
@@ -96,8 +95,9 @@ public class WebRtcAudioEffects {

   // Returns true if the platform AEC should be excluded based on its UUID.
   // AudioEffect.queryEffects() can throw IllegalStateException.
-  @TargetApi(18)
   private static boolean isAcousticEchoCancelerExcludedByUUID() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     for (Descriptor d : getAvailableEffects()) {
       if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC)
           && d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
@@ -109,8 +109,9 @@ public class WebRtcAudioEffects {

   // Returns true if the platform NS should be excluded based on its UUID.
   // AudioEffect.queryEffects() can throw IllegalStateException.
-  @TargetApi(18)
   private static boolean isNoiseSuppressorExcludedByUUID() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     for (Descriptor d : getAvailableEffects()) {
       if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
         return true;
@@ -120,14 +121,16 @@ public class WebRtcAudioEffects {
   }

   // Returns true if the device supports Acoustic Echo Cancellation (AEC).
-  @TargetApi(18)
   private static boolean isAcousticEchoCancelerEffectAvailable() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC);
   }

   // Returns true if the device supports Noise Suppression (NS).
-  @TargetApi(18)
   private static boolean isNoiseSuppressorEffectAvailable() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS);
   }

@@ -274,9 +277,8 @@ public class WebRtcAudioEffects {
   // AudioEffect.Descriptor array that are actually not available on the device.
   // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
   // AutomaticGainControl.isAvailable() returns false.
-  @TargetApi(18)
   private boolean effectTypeIsVoIP(UUID type) {
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
+    if (Build.VERSION.SDK_INT < 18)
       return false;

     return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
index 7a76494054..c4d6176756 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
@@ -10,7 +10,6 @@

 package org.webrtc.voiceengine;

-import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioFormat;
@@ -259,14 +258,13 @@ public class WebRtcAudioManager {
     // as well. The NDK doc states that: "As of API level 21, lower latency
     // audio input is supported on select devices. To take advantage of this
     // feature, first confirm that lower latency output is available".
-    return WebRtcAudioUtils.runningOnLollipopOrHigher() && isLowLatencyOutputSupported();
+    return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported();
   }

   // Returns true if the device has professional audio level of functionality
   // and therefore supports the lowest possible round-trip latency.
-  @TargetApi(23)
   private boolean isProAudioSupported() {
-    return WebRtcAudioUtils.runningOnMarshmallowOrHigher()
+    return Build.VERSION.SDK_INT >= 23
         && ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature(
                PackageManager.FEATURE_AUDIO_PRO);
   }
@@ -277,7 +275,7 @@ public class WebRtcAudioManager {
     if (blacklistDeviceForAAudioUsage) {
       Logging.w(TAG, "AAudio support is currently disabled on all devices!");
     }
-    return !blacklistDeviceForAAudioUsage && WebRtcAudioUtils.runningOnOreoMR1OrHigher();
+    return !blacklistDeviceForAAudioUsage && Build.VERSION.SDK_INT >= 27;
   }

   // Returns the native output sample rate for this device's output stream.
@@ -297,28 +295,24 @@ public class WebRtcAudioManager {
     }
     // No overrides available. Deliver best possible estimate based on default
     // Android AudioManager APIs.
-    final int sampleRateHz;
-    if (WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
-      sampleRateHz = getSampleRateOnJellyBeanMR10OrHigher();
-    } else {
-      sampleRateHz = WebRtcAudioUtils.getDefaultSampleRateHz();
-    }
+    final int sampleRateHz = getSampleRateForApiLevel();
     Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
     return sampleRateHz;
   }

-  @TargetApi(17)
-  private int getSampleRateOnJellyBeanMR10OrHigher() {
+  private int getSampleRateForApiLevel() {
+    if (Build.VERSION.SDK_INT < 17) {
+      return WebRtcAudioUtils.getDefaultSampleRateHz();
+    }
     String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
     return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz()
                                       : Integer.parseInt(sampleRateString);
   }

   // Returns the native output buffer size for low-latency output streams.
-  @TargetApi(17)
   private int getLowLatencyOutputFramesPerBuffer() {
     assertTrue(isLowLatencyOutputSupported());
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
+    if (Build.VERSION.SDK_INT < 17) {
       return DEFAULT_FRAME_PER_BUFFER;
     }
     String framesPerBuffer =
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
index 31f8022411..a4fc62bddf 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
@@ -10,10 +10,10 @@

 package org.webrtc.voiceengine;

-import android.annotation.TargetApi;
 import android.media.AudioFormat;
 import android.media.AudioRecord;
 import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
 import android.os.Process;
 import java.lang.System;
 import java.nio.ByteBuffer;
@@ -332,9 +332,8 @@ public class WebRtcAudioRecord {
         + "sample rate: " + audioRecord.getSampleRate());
   }

-  @TargetApi(23)
   private void logMainParametersExtended() {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 23) {
       Logging.d(TAG, "AudioRecord: "
               // The frame count of the native AudioRecord buffer.
               + "buffer size in frames: " + audioRecord.getBufferSizeInFrames());
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index 196e8f649f..8b287ece40 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -10,13 +10,13 @@

 package org.webrtc.voiceengine;

-import android.annotation.SuppressLint;
 import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioAttributes;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioTrack;
+import android.os.Build;
 import android.os.Process;
 import java.lang.Thread;
 import java.nio.ByteBuffer;
@@ -61,19 +61,14 @@ public class WebRtcAudioTrack {
   }

   private static int getDefaultUsageAttribute() {
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      return getDefaultUsageAttributeOnLollipopOrHigher();
+    if (Build.VERSION.SDK_INT >= 21) {
+      return AudioAttributes.USAGE_VOICE_COMMUNICATION;
     } else {
-      // Not used on SDKs lower than L.
+      // Not used on SDKs lower than 21.
       return 0;
     }
   }

-  @TargetApi(21)
-  private static int getDefaultUsageAttributeOnLollipopOrHigher() {
-    return AudioAttributes.USAGE_VOICE_COMMUNICATION;
-  }
-
   private final long nativeAudioTrack;
   private final AudioManager audioManager;
   private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
@@ -159,12 +154,7 @@ public class WebRtcAudioTrack {
           byteBuffer.put(emptyBytes);
           byteBuffer.position(0);
         }
-        int bytesWritten = 0;
-        if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-          bytesWritten = writeOnLollipop(audioTrack, byteBuffer, sizeInBytes);
-        } else {
-          bytesWritten = writePreLollipop(audioTrack, byteBuffer, sizeInBytes);
-        }
+        int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
         if (bytesWritten != sizeInBytes) {
           Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
           // If a write() returns a negative value, an error has occurred.
@@ -198,13 +188,12 @@ public class WebRtcAudioTrack {
       }
     }

-    @TargetApi(21)
-    private int writeOnLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
-      return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
-    }
-
-    private int writePreLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
-      return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+    private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      if (Build.VERSION.SDK_INT >= 21) {
+        return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+      } else {
+        return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+      }
     }

     // Stops the inner thread loop which results in calling AudioTrack.stop().
@@ -266,7 +255,7 @@
     // Create an AudioTrack object and initialize its associated audio buffer.
     // The size of this buffer determines how long an AudioTrack can play
     // before running out of data.
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 21) {
       // If we are on API level 21 or higher, it is possible to use a special AudioTrack
       // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
       // supersede the notion of stream types for defining the behavior of audio playback,
@@ -370,11 +359,8 @@
     return true;
   }

-  // TODO(bugs.webrtc.org/8580): Call requires API level 21 (current min is 16):
-  // `android.media.AudioManager#isVolumeFixed`: NewApi [warning]
-  @SuppressLint("NewApi")
   private boolean isVolumeFixed() {
-    if (!WebRtcAudioUtils.runningOnLollipopOrHigher())
+    if (Build.VERSION.SDK_INT < 21)
       return false;
     return audioManager.isVolumeFixed();
   }
@@ -437,29 +423,36 @@
         AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
   }

-  @TargetApi(24)
-  private void logMainParametersExtended() {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+  private void logBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= 23) {
       Logging.d(TAG, "AudioTrack: "
               // The effective size of the AudioTrack buffer that the app writes to.
               + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
     }
-    if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
-      Logging.d(TAG, "AudioTrack: "
+  }
+
+  private void logBufferCapacityInFrames() {
+    if (Build.VERSION.SDK_INT >= 24) {
+      Logging.d(TAG,
+          "AudioTrack: "
               // Maximum size of the AudioTrack buffer in frames.
               + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames());
     }
   }

+  private void logMainParametersExtended() {
+    logBufferSizeInFrames();
+    logBufferCapacityInFrames();
+  }
+
   // Prints the number of underrun occurrences in the application-level write
   // buffer since the AudioTrack was created. An underrun occurs if the app does
   // not write audio data quickly enough, causing the buffer to underflow and a
   // potential audio glitch.
   // TODO(henrika): keep track of this value in the field and possibly add new
   // UMA stat if needed.
-  @TargetApi(24)
   private void logUnderrunCount() {
-    if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 24) {
       Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
     }
   }
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
index f0d7fd6bc4..dba5dcfa6a 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
@@ -15,8 +15,6 @@ import static android.media.AudioManager.MODE_IN_COMMUNICATION;
 import static android.media.AudioManager.MODE_NORMAL;
 import static android.media.AudioManager.MODE_RINGTONE;

-import android.annotation.SuppressLint;
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioDeviceInfo;
@@ -166,41 +164,6 @@ public final class WebRtcAudioUtils {
     return Arrays.asList(WebRtcAudioUtils.BLACKLISTED_NS_MODELS);
   }

-  public static boolean runningOnJellyBeanMR1OrHigher() {
-    // November 2012: Android 4.2. API Level 17.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
-  }
-
-  public static boolean runningOnJellyBeanMR2OrHigher() {
-    // July 24, 2013: Android 4.3. API Level 18.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2;
-  }
-
-  public static boolean runningOnLollipopOrHigher() {
-    // API Level 21.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
-  }
-
-  public static boolean runningOnMarshmallowOrHigher() {
-    // API Level 23.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.M;
-  }
-
-  public static boolean runningOnNougatOrHigher() {
-    // API Level 24.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
-  }
-
-  public static boolean runningOnOreoOrHigher() {
-    // API Level 26.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
-  }
-
-  public static boolean runningOnOreoMR1OrHigher() {
-    // API Level 27.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1;
-  }
-
   // Helper method for building a string of thread information.
   public static String getThreadInfo() {
     return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
@@ -255,9 +218,13 @@ public final class WebRtcAudioUtils {
         + "BT SCO: " + audioManager.isBluetoothScoOn());
   }

-  // TODO(bugs.webrtc.org/8580): Call requires API level 21 (current min is 16):
-  // `android.media.AudioManager#isVolumeFixed`: NewApi [warning]
-  @SuppressLint("NewApi")
+  private static boolean isVolumeFixed(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 21) {
+      return false;
+    }
+    return audioManager.isVolumeFixed();
+  }
+
   // Adds volume information for all possible stream types.
   private static void logAudioStateVolume(String tag, AudioManager audioManager) {
     final int[] streams = {
         AudioManager.STREAM_VOICE_CALL,
         AudioManager.STREAM_MUSIC,
         AudioManager.STREAM_RING,
         AudioManager.STREAM_ALARM,
         AudioManager.STREAM_NOTIFICATION,
         AudioManager.STREAM_SYSTEM
     };
     Logging.d(tag, "Audio State: ");
-    boolean fixedVolume = false;
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      fixedVolume = audioManager.isVolumeFixed();
-      // Some devices may not have volume controls and might use a fixed volume.
-      Logging.d(tag, "  fixed volume=" + fixedVolume);
-    }
+    // Some devices may not have volume controls and might use a fixed volume.
+    boolean fixedVolume = isVolumeFixed(audioManager);
+    Logging.d(tag, "  fixed volume=" + fixedVolume);
     if (!fixedVolume) {
       for (int stream : streams) {
         StringBuilder info = new StringBuilder();
@@ -287,17 +251,15 @@ public final class WebRtcAudioUtils {
     }
   }

-  @TargetApi(23)
   private static void logIsStreamMute(
       String tag, AudioManager audioManager, int stream, StringBuilder info) {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 23) {
       info.append(", muted=").append(audioManager.isStreamMute(stream));
     }
   }

-  @TargetApi(23)
   private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
-    if (!WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT < 23) {
       return;
     }
     final AudioDeviceInfo[] devices =
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
index 116cf53ac6..51048eab36 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
@@ -10,7 +10,6 @@

 package org.webrtc.audio;

-import android.annotation.TargetApi;
 import android.media.audiofx.AcousticEchoCanceler;
 import android.media.audiofx.AudioEffect;
 import android.media.audiofx.AudioEffect.Descriptor;
@@ -56,14 +55,16 @@ class WebRtcAudioEffects {

   // Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
   // fulfilled.
-  @TargetApi(18)
   public static boolean isAcousticEchoCancelerSupported() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER);
   }

   // Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled.
-  @TargetApi(18)
   public static boolean isNoiseSuppressorSupported() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR);
   }

@@ -188,9 +189,8 @@ class WebRtcAudioEffects {
   // AudioEffect.Descriptor array that are actually not available on the device.
   // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
   // AutomaticGainControl.isAvailable() returns false.
-  @TargetApi(18)
   private boolean effectTypeIsVoIP(UUID type) {
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
+    if (Build.VERSION.SDK_INT < 18)
       return false;

     return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
@@ -221,7 +221,6 @@ class WebRtcAudioEffects {
   // Returns true if an effect of the specified type is available. Functionally
   // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but
   // faster as it avoids the expensive OS call to enumerate effects.
-  @TargetApi(18)
   private static boolean isEffectTypeAvailable(UUID effectType, UUID blackListedUuid) {
     Descriptor[] effects = getAvailableEffects();
     if (effects == null) {
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
index 5b810084cc..f016dad502 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
@@ -10,13 +10,13 @@

 package org.webrtc.audio;

-import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioRecord;
 import android.media.AudioTrack;
+import android.os.Build;

 import org.webrtc.Logging;
 import org.webrtc.CalledByNative;
@@ -64,7 +64,7 @@ class WebRtcAudioManager {
     // as well. The NDK doc states that: "As of API level 21, lower latency
     // audio input is supported on select devices. To take advantage of this
     // feature, first confirm that lower latency output is available".
-    return WebRtcAudioUtils.runningOnLollipopOrHigher() && isLowLatencyOutputSupported(context);
+    return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported(context);
   }

   /**
@@ -79,23 +79,22 @@ class WebRtcAudioManager {
       return 8000;
     }
     // Deliver best possible estimate based on default Android AudioManager APIs.
-    final int sampleRateHz = WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()
-        ? getSampleRateOnJellyBeanMR10OrHigher(audioManager)
-        : DEFAULT_SAMPLE_RATE_HZ;
+    final int sampleRateHz = getSampleRateForApiLevel(audioManager);
     Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
     return sampleRateHz;
   }

-  @TargetApi(17)
-  private static int getSampleRateOnJellyBeanMR10OrHigher(AudioManager audioManager) {
+  private static int getSampleRateForApiLevel(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 17) {
+      return DEFAULT_SAMPLE_RATE_HZ;
+    }
     String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
     return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
   }

   // Returns the native output buffer size for low-latency output streams.
-  @TargetApi(17)
   private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
+    if (Build.VERSION.SDK_INT < 17) {
       return DEFAULT_FRAME_PER_BUFFER;
     }
     String framesPerBuffer =
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
index d2a17859ee..864a2b1b63 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
@@ -10,12 +10,12 @@

 package org.webrtc.audio;

-import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioRecord;
 import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
 import android.os.Process;
 import java.lang.System;
 import java.nio.ByteBuffer;
@@ -299,9 +299,8 @@ class WebRtcAudioRecord {
         + "sample rate: " + audioRecord.getSampleRate());
   }

-  @TargetApi(23)
   private void logMainParametersExtended() {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 23) {
       Logging.d(TAG, "AudioRecord: "
               // The frame count of the native AudioRecord buffer.
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index 7912de71e5..1318a1b190 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -10,13 +10,13 @@

 package org.webrtc.audio;

-import android.annotation.SuppressLint;
 import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioAttributes;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioTrack;
+import android.os.Build;
 import android.os.Process;
 import java.lang.Thread;
 import java.nio.ByteBuffer;
@@ -49,19 +49,14 @@ class WebRtcAudioTrack {
   private static final int DEFAULT_USAGE = getDefaultUsageAttribute();

   private static int getDefaultUsageAttribute() {
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      return getDefaultUsageAttributeOnLollipopOrHigher();
+    if (Build.VERSION.SDK_INT >= 21) {
+      return AudioAttributes.USAGE_VOICE_COMMUNICATION;
     } else {
       // Not used on SDKs lower than L.
       return 0;
     }
   }

-  @TargetApi(21)
-  private static int getDefaultUsageAttributeOnLollipopOrHigher() {
-    return AudioAttributes.USAGE_VOICE_COMMUNICATION;
-  }
-
   private long nativeAudioTrack;
   private final Context context;
   private final AudioManager audioManager;
@@ -117,12 +112,7 @@ class WebRtcAudioTrack {
           byteBuffer.put(emptyBytes);
           byteBuffer.position(0);
         }
-        int bytesWritten = 0;
-        if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-          bytesWritten = writeOnLollipop(audioTrack, byteBuffer, sizeInBytes);
-        } else {
-          bytesWritten = writePreLollipop(audioTrack, byteBuffer, sizeInBytes);
-        }
+        int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
         if (bytesWritten != sizeInBytes) {
           Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
           // If a write() returns a negative value, an error has occurred.
@@ -156,13 +146,12 @@ class WebRtcAudioTrack {
       }
     }

-    @TargetApi(21)
-    private int writeOnLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
-      return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
-    }
-
-    private int writePreLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
-      return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+    private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      if (Build.VERSION.SDK_INT >= 21) {
+        return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+      } else {
+        return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+      }
     }

     // Stops the inner thread loop which results in calling AudioTrack.stop().
@@ -233,7 +222,7 @@ class WebRtcAudioTrack {
     // Create an AudioTrack object and initialize its associated audio buffer.
     // The size of this buffer determines how long an AudioTrack can play
     // before running out of data.
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 21) {
       // If we are on API level 21 or higher, it is possible to use a special AudioTrack
       // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
       // supersede the notion of stream types for defining the behavior of audio playback,
@@ -339,11 +328,8 @@ class WebRtcAudioTrack {
     return true;
   }

-  // TODO(bugs.webrtc.org/8580): Call requires API level 21 (current min is 16):
-  // `android.media.AudioManager#isVolumeFixed`: NewApi [warning]
-  @SuppressLint("NewApi")
   private boolean isVolumeFixed() {
-    if (!WebRtcAudioUtils.runningOnLollipopOrHigher())
+    if (Build.VERSION.SDK_INT < 21)
       return false;
     return audioManager.isVolumeFixed();
   }
@@ -402,15 +388,17 @@ class WebRtcAudioTrack {
         AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
   }

-  @TargetApi(24)
-  private void logMainParametersExtended() {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+  private void logBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= 23) {
       Logging.d(TAG,
           "AudioTrack: "
               // The effective size of the AudioTrack buffer that the app writes to.
               + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
     }
-    if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
+  }
+
+  private void logBufferCapacityInFrames() {
+    if (Build.VERSION.SDK_INT >= 24) {
       Logging.d(TAG,
           "AudioTrack: "
               // Maximum size of the AudioTrack buffer in frames.
@@ -418,15 +406,19 @@
     }
   }

+  private void logMainParametersExtended() {
+    logBufferSizeInFrames();
+    logBufferCapacityInFrames();
+  }
+
   // Prints the number of underrun occurrences in the application-level write
   // buffer since the AudioTrack was created. An underrun occurs if the app does
   // not write audio data quickly enough, causing the buffer to underflow and a
   // potential audio glitch.
   // TODO(henrika): keep track of this value in the field and possibly add new
   // UMA stat if needed.
-  @TargetApi(24)
   private void logUnderrunCount() {
-    if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 24) {
       Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
     }
   }
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
index 052e26c5cb..b277fa0279 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
@@ -15,8 +15,6 @@ import static android.media.AudioManager.MODE_IN_COMMUNICATION;
 import static android.media.AudioManager.MODE_NORMAL;
 import static android.media.AudioManager.MODE_RINGTONE;

-import android.annotation.SuppressLint;
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioDeviceInfo;
@@ -34,41 +32,6 @@ import org.webrtc.Logging;
 final class WebRtcAudioUtils {
   private static final String TAG = "WebRtcAudioUtilsExternal";

-  public static boolean runningOnJellyBeanMR1OrHigher() {
-    // November 2012: Android 4.2. API Level 17.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
-  }
-
-  public static boolean runningOnJellyBeanMR2OrHigher() {
-    // July 24, 2013: Android 4.3. API Level 18.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2;
-  }
-
-  public static boolean runningOnLollipopOrHigher() {
-    // API Level 21.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
-  }
-
-  public static boolean runningOnMarshmallowOrHigher() {
-    // API Level 23.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.M;
-  }
-
-  public static boolean runningOnNougatOrHigher() {
-    // API Level 24.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
-  }
-
-  public static boolean runningOnOreoOrHigher() {
-    // API Level 26.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
-  }
-
-  public static boolean runningOnOreoMR1OrHigher() {
-    // API Level 27.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1;
-  }
-
   // Helper method for building a string of thread information.
   public static String getThreadInfo() {
     return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
@@ -116,21 +79,22 @@ final class WebRtcAudioUtils {
         + "BT SCO: " + audioManager.isBluetoothScoOn());
   }

-  // TODO(bugs.webrtc.org/8580): Call requires API level 21 (current min is 16):
-  // `android.media.AudioManager#isVolumeFixed`: NewApi [warning]
-  @SuppressLint("NewApi")
+  private static boolean isVolumeFixed(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 21) {
+      return false;
+    }
+    return audioManager.isVolumeFixed();
+  }
+
   // Adds volume information for all possible stream types.
   private static void logAudioStateVolume(String tag, AudioManager audioManager) {
     final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
         AudioManager.STREAM_RING, AudioManager.STREAM_ALARM, AudioManager.STREAM_NOTIFICATION,
         AudioManager.STREAM_SYSTEM};
     Logging.d(tag, "Audio State: ");
-    boolean fixedVolume = false;
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      fixedVolume = audioManager.isVolumeFixed();
-      // Some devices may not have volume controls and might use a fixed volume.
-      Logging.d(tag, "  fixed volume=" + fixedVolume);
-    }
+    // Some devices may not have volume controls and might use a fixed volume.
+    boolean fixedVolume = isVolumeFixed(audioManager);
+    Logging.d(tag, "  fixed volume=" + fixedVolume);
     if (!fixedVolume) {
       for (int stream : streams) {
         StringBuilder info = new StringBuilder();
@@ -143,17 +107,15 @@ final class WebRtcAudioUtils {
     }
   }

-  @TargetApi(23)
   private static void logIsStreamMute(
       String tag, AudioManager audioManager, int stream, StringBuilder info) {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 23) {
       info.append(", muted=").append(audioManager.isStreamMute(stream));
     }
   }

-  @TargetApi(23)
   private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
-    if (!WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT < 23) {
       return;
     }
     final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
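-- 
The idiom applied throughout this patch, restated as a standalone sketch
below the signature separator so the patch itself still applies cleanly.
The class name ApiGuardSketch is illustrative and does not appear in the
patch; AudioManager#isVolumeFixed() and Build.VERSION.SDK_INT are real
Android framework APIs. The point of the change: @TargetApi and
@SuppressLint("NewApi") only silence lint, while a runtime check on
Build.VERSION.SDK_INT actually keeps calls into newer framework APIs
from executing on older devices.

import android.media.AudioManager;
import android.os.Build;

// Illustrative sketch of the runtime-guard idiom; not part of the patch.
final class ApiGuardSketch {
  // AudioManager#isVolumeFixed() exists only on API level 21 and higher.
  // Annotating with @TargetApi(21) would merely suppress the lint warning;
  // this guard prevents the call from ever running on pre-21 devices.
  static boolean isVolumeFixed(AudioManager audioManager) {
    if (Build.VERSION.SDK_INT < 21) {
      return false; // Safe default where the API is unavailable.
    }
    return audioManager.isVolumeFixed();
  }
}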