Remove @SuppressLint(NewApi) and guard @TargetApi methods

Also rename runningOnLollipopOrHigher() etc. in WebRtcAudioUtils
to runningOnApi21OrHigher() etc., since mapping API numbers to
names is error-prone.

Bug: webrtc:9818
Change-Id: I4a71de72e3891ca2b6fc2341db9131bb2db4cce7
Reviewed-on: https://webrtc-review.googlesource.com/c/103820
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Commit-Queue: Paulina Hensman <phensman@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#25009}
Author: Paulina Hensman
Date: 2018-10-04 16:54:13 +02:00 (committed by Commit Bot)
parent 97c65b76c2
commit 6c966eaf17
10 changed files with 110 additions and 209 deletions
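
For reference, the pattern this change applies throughout: @SuppressLint("NewApi")
and @TargetApi only affect lint and add no runtime check, so the old code paired
them with helpers like runningOnLollipopOrHigher(), which the NewApi lint detector
cannot see through. Each method now carries an inline Build.VERSION.SDK_INT guard
that lint does recognize, making both the annotation and the suppression dead
weight. A minimal before/after sketch, mirroring the isVolumeFixed() change in the
diffs below:

  import android.media.AudioManager;
  import android.os.Build;

  // Before (suppression plus an out-of-line guard that lint cannot follow):
  //   @SuppressLint("NewApi")
  //   private static boolean isVolumeFixed(AudioManager audioManager) {
  //     if (!WebRtcAudioUtils.runningOnLollipopOrHigher())
  //       return false;
  //     return audioManager.isVolumeFixed();
  //   }

  // After (inline guard that the NewApi lint check recognizes):
  private static boolean isVolumeFixed(AudioManager audioManager) {
    // AudioManager.isVolumeFixed() exists on API 21 (Lollipop) and higher.
    if (Build.VERSION.SDK_INT < 21) {
      return false;
    }
    return audioManager.isVolumeFixed();
  }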

WebRtcAudioEffects.java

@@ -10,7 +10,6 @@
 package org.webrtc.audio;
-import android.annotation.TargetApi;
 import android.media.audiofx.AcousticEchoCanceler;
 import android.media.audiofx.AudioEffect;
 import android.media.audiofx.AudioEffect.Descriptor;
@@ -56,14 +55,16 @@ class WebRtcAudioEffects {
   // Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
   // fulfilled.
-  @TargetApi(18)
   public static boolean isAcousticEchoCancelerSupported() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER);
   }
   // Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled.
-  @TargetApi(18)
   public static boolean isNoiseSuppressorSupported() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR);
   }
@@ -188,9 +189,8 @@ class WebRtcAudioEffects {
   // AudioEffect.Descriptor array that are actually not available on the device.
   // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
   // AutomaticGainControl.isAvailable() returns false.
-  @TargetApi(18)
   private boolean effectTypeIsVoIP(UUID type) {
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
+    if (Build.VERSION.SDK_INT < 18)
       return false;
     return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
@@ -221,7 +221,6 @@ class WebRtcAudioEffects {
   // Returns true if an effect of the specified type is available. Functionally
   // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but
   // faster as it avoids the expensive OS call to enumerate effects.
-  @TargetApi(18)
   private static boolean isEffectTypeAvailable(UUID effectType, UUID blackListedUuid) {
     Descriptor[] effects = getAvailableEffects();
     if (effects == null) {
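
The hunk above cuts off inside isEffectTypeAvailable(); for context, the
descriptor scan that the comment describes plausibly looks like this (the loop
body is an assumption inferred from the visible signature and comment, not the
file's exact code):

  private static boolean isEffectTypeAvailable(UUID effectType, UUID blackListedUuid) {
    Descriptor[] effects = getAvailableEffects();
    if (effects == null) {
      return false;
    }
    // Look for a descriptor of the requested effect type, rejecting the
    // blacklisted AOSP implementation identified by its implementation UUID.
    for (Descriptor d : effects) {
      if (d.type.equals(effectType)) {
        return !d.uuid.equals(blackListedUuid);
      }
    }
    return false;
  }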

WebRtcAudioManager.java

@@ -10,13 +10,13 @@
 package org.webrtc.audio;
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioRecord;
 import android.media.AudioTrack;
+import android.os.Build;
 import org.webrtc.Logging;
 import org.webrtc.CalledByNative;
@@ -64,7 +64,7 @@ class WebRtcAudioManager {
     // as well. The NDK doc states that: "As of API level 21, lower latency
     // audio input is supported on select devices. To take advantage of this
     // feature, first confirm that lower latency output is available".
-    return WebRtcAudioUtils.runningOnLollipopOrHigher() && isLowLatencyOutputSupported(context);
+    return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported(context);
   }
   /**
@@ -79,23 +79,22 @@ class WebRtcAudioManager {
       return 8000;
     }
     // Deliver best possible estimate based on default Android AudioManager APIs.
-    final int sampleRateHz = WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()
-        ? getSampleRateOnJellyBeanMR10OrHigher(audioManager)
-        : DEFAULT_SAMPLE_RATE_HZ;
+    final int sampleRateHz = getSampleRateForApiLevel(audioManager);
     Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
     return sampleRateHz;
   }
-  @TargetApi(17)
-  private static int getSampleRateOnJellyBeanMR10OrHigher(AudioManager audioManager) {
+  private static int getSampleRateForApiLevel(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 17) {
+      return DEFAULT_SAMPLE_RATE_HZ;
+    }
     String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
     return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
   }
   // Returns the native output buffer size for low-latency output streams.
-  @TargetApi(17)
   private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
+    if (Build.VERSION.SDK_INT < 17) {
       return DEFAULT_FRAME_PER_BUFFER;
     }
     String framesPerBuffer =
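
isLowLatencyOutputSupported() is not shown in this excerpt; judging from the
PackageManager import retained at the top of the file, it presumably reduces to
a system-feature check along these lines (an assumption, not the file's exact
code):

  private static boolean isLowLatencyOutputSupported(Context context) {
    // Devices declare android.hardware.audio.low_latency when their audio
    // output path meets Android's low-latency performance target.
    return context.getPackageManager().hasSystemFeature(
        PackageManager.FEATURE_AUDIO_LOW_LATENCY);
  }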

WebRtcAudioRecord.java

@@ -10,12 +10,12 @@
 package org.webrtc.audio;
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioRecord;
 import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
 import android.os.Process;
 import java.lang.System;
 import java.nio.ByteBuffer;
@@ -299,9 +299,8 @@ class WebRtcAudioRecord {
         + "sample rate: " + audioRecord.getSampleRate());
   }
-  @TargetApi(23)
   private void logMainParametersExtended() {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 23) {
       Logging.d(TAG,
           "AudioRecord: "
           // The frame count of the native AudioRecord buffer.

WebRtcAudioTrack.java

@@ -10,13 +10,13 @@
 package org.webrtc.audio;
 import android.annotation.SuppressLint;
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioAttributes;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioTrack;
+import android.os.Build;
 import android.os.Process;
 import java.lang.Thread;
 import java.nio.ByteBuffer;
@@ -49,19 +49,14 @@ class WebRtcAudioTrack {
   private static final int DEFAULT_USAGE = getDefaultUsageAttribute();
   private static int getDefaultUsageAttribute() {
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      return getDefaultUsageAttributeOnLollipopOrHigher();
+    if (Build.VERSION.SDK_INT >= 21) {
+      return AudioAttributes.USAGE_VOICE_COMMUNICATION;
     } else {
       // Not used on SDKs lower than L.
       return 0;
     }
   }
-  @TargetApi(21)
-  private static int getDefaultUsageAttributeOnLollipopOrHigher() {
-    return AudioAttributes.USAGE_VOICE_COMMUNICATION;
-  }
   private long nativeAudioTrack;
   private final Context context;
   private final AudioManager audioManager;
@@ -117,12 +112,7 @@ class WebRtcAudioTrack {
         byteBuffer.put(emptyBytes);
         byteBuffer.position(0);
       }
-      int bytesWritten = 0;
-      if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-        bytesWritten = writeOnLollipop(audioTrack, byteBuffer, sizeInBytes);
-      } else {
-        bytesWritten = writePreLollipop(audioTrack, byteBuffer, sizeInBytes);
-      }
+      int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
       if (bytesWritten != sizeInBytes) {
         Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
         // If a write() returns a negative value, an error has occurred.
@@ -156,13 +146,12 @@
       }
     }
-    @TargetApi(21)
-    private int writeOnLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
-      return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
-    }
-    private int writePreLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
-      return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+    private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      if (Build.VERSION.SDK_INT >= 21) {
+        return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+      } else {
+        return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+      }
     }
     // Stops the inner thread loop which results in calling AudioTrack.stop().
@@ -233,7 +222,7 @@
     // Create an AudioTrack object and initialize its associated audio buffer.
     // The size of this buffer determines how long an AudioTrack can play
     // before running out of data.
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 21) {
       // If we are on API level 21 or higher, it is possible to use a special AudioTrack
       // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
       // supersede the notion of stream types for defining the behavior of audio playback,
@@ -339,11 +328,8 @@
     return true;
   }
-  // TODO(bugs.webrtc.org/8580): Call requires API level 21 (current min is 16):
-  // `android.media.AudioManager#isVolumeFixed`: NewApi [warning]
-  @SuppressLint("NewApi")
   private boolean isVolumeFixed() {
-    if (!WebRtcAudioUtils.runningOnLollipopOrHigher())
+    if (Build.VERSION.SDK_INT < 21)
       return false;
     return audioManager.isVolumeFixed();
   }
@@ -402,15 +388,17 @@
         AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
   }
-  @TargetApi(24)
-  private void logMainParametersExtended() {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+  private void logBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= 23) {
       Logging.d(TAG,
           "AudioTrack: "
           // The effective size of the AudioTrack buffer that the app writes to.
           + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
     }
-    if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
+  }
+  private void logBufferCapacityInFrames() {
+    if (Build.VERSION.SDK_INT >= 24) {
       Logging.d(TAG,
           "AudioTrack: "
           // Maximum size of the AudioTrack buffer in frames.
@@ -418,15 +406,19 @@
     }
   }
+  private void logMainParametersExtended() {
+    logBufferSizeInFrames();
+    logBufferCapacityInFrames();
+  }
   // Prints the number of underrun occurrences in the application-level write
   // buffer since the AudioTrack was created. An underrun occurs if the app does
   // not write audio data quickly enough, causing the buffer to underflow and a
   // potential audio glitch.
   // TODO(henrika): keep track of this value in the field and possibly add new
   // UMA stat if needed.
-  @TargetApi(24)
   private void logUnderrunCount() {
-    if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 24) {
       Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
     }
   }
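
For readers unfamiliar with the "special AudioTrack constructor" mentioned in
the API 21 branch above: it takes AudioAttributes and AudioFormat instead of a
raw stream type. A representative call follows; the wrapper name and parameter
choices here are illustrative, not the file's exact code:

  private static AudioTrack createAudioTrackOnApi21(int sampleRateInHz, int bufferSizeInBytes) {
    return new AudioTrack(
        new AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
            .build(),
        new AudioFormat.Builder()
            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
            .setSampleRate(sampleRateInHz)
            .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
            .build(),
        bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
  }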

WebRtcAudioUtils.java

@@ -15,8 +15,6 @@ import static android.media.AudioManager.MODE_IN_COMMUNICATION;
 import static android.media.AudioManager.MODE_NORMAL;
 import static android.media.AudioManager.MODE_RINGTONE;
-import android.annotation.SuppressLint;
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioDeviceInfo;
@@ -34,41 +32,6 @@ import org.webrtc.Logging;
 final class WebRtcAudioUtils {
   private static final String TAG = "WebRtcAudioUtilsExternal";
-  public static boolean runningOnJellyBeanMR1OrHigher() {
-    // November 2012: Android 4.2. API Level 17.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
-  }
-  public static boolean runningOnJellyBeanMR2OrHigher() {
-    // July 24, 2013: Android 4.3. API Level 18.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2;
-  }
-  public static boolean runningOnLollipopOrHigher() {
-    // API Level 21.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
-  }
-  public static boolean runningOnMarshmallowOrHigher() {
-    // API Level 23.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.M;
-  }
-  public static boolean runningOnNougatOrHigher() {
-    // API Level 24.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
-  }
-  public static boolean runningOnOreoOrHigher() {
-    // API Level 26.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
-  }
-  public static boolean runningOnOreoMR1OrHigher() {
-    // API Level 27.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1;
-  }
   // Helper method for building a string of thread information.
   public static String getThreadInfo() {
     return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
@@ -116,21 +79,22 @@ final class WebRtcAudioUtils {
         + "BT SCO: " + audioManager.isBluetoothScoOn());
   }
-  // TODO(bugs.webrtc.org/8580): Call requires API level 21 (current min is 16):
-  // `android.media.AudioManager#isVolumeFixed`: NewApi [warning]
-  @SuppressLint("NewApi")
   private static boolean isVolumeFixed(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 21) {
+      return false;
+    }
     return audioManager.isVolumeFixed();
   }
   // Adds volume information for all possible stream types.
   private static void logAudioStateVolume(String tag, AudioManager audioManager) {
     final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
         AudioManager.STREAM_RING, AudioManager.STREAM_ALARM, AudioManager.STREAM_NOTIFICATION,
         AudioManager.STREAM_SYSTEM};
     Logging.d(tag, "Audio State: ");
-    boolean fixedVolume = false;
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      fixedVolume = audioManager.isVolumeFixed();
-      // Some devices may not have volume controls and might use a fixed volume.
-      Logging.d(tag, " fixed volume=" + fixedVolume);
-    }
+    // Some devices may not have volume controls and might use a fixed volume.
+    boolean fixedVolume = isVolumeFixed(audioManager);
+    Logging.d(tag, " fixed volume=" + fixedVolume);
     if (!fixedVolume) {
       for (int stream : streams) {
         StringBuilder info = new StringBuilder();
@@ -143,17 +107,15 @@
     }
   }
-  @TargetApi(23)
   private static void logIsStreamMute(
       String tag, AudioManager audioManager, int stream, StringBuilder info) {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 23) {
       info.append(", muted=").append(audioManager.isStreamMute(stream));
     }
   }
-  @TargetApi(23)
   private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
-    if (!WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT < 23) {
       return;
     }
     final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
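
The runningOn*OrHigher() helpers above are deleted outright; the renamed
runningOnApi21OrHigher() etc. mentioned in the commit message land in files
outside this excerpt. Their assumed post-rename shape (a sketch, not code from
the patch):

  import android.os.Build;

  // Assumed shape of a renamed helper: the API level is in the method name,
  // so readers no longer have to remember that Lollipop means 21.
  public static boolean runningOnApi21OrHigher() {
    return Build.VERSION.SDK_INT >= 21;
  }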