Remove @SuppressLint(NewApi) and guard @TargetApi methods
Also rename runningOnLollipopOrHigher() etc. in WebRtcAudioUtils to
runningOnApi21OrHigher() etc., since mapping API numbers to names is
error prone.

Bug: webrtc:9818
Change-Id: I4a71de72e3891ca2b6fc2341db9131bb2db4cce7
Reviewed-on: https://webrtc-review.googlesource.com/c/103820
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
Commit-Queue: Paulina Hensman <phensman@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#25009}
commit 6c966eaf17 (parent 97c65b76c2), committed by Commit Bot
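The pattern applied throughout this commit replaces compile-time lint annotations with explicit runtime version checks: @TargetApi and @SuppressLint("NewApi") only silence the linter and do nothing at runtime, so an unguarded call on an old device can still crash. A minimal before/after sketch of the shape of the change; the class name here is illustrative, not part of the commit:

import android.media.AudioManager;
import android.os.Build;

class VolumeHelper {
  private final AudioManager audioManager;

  VolumeHelper(AudioManager audioManager) {
    this.audioManager = audioManager;
  }

  // Before: @SuppressLint("NewApi") or @TargetApi(21) hid the lint warning
  // but offered no runtime protection on API < 21 devices.
  // After: an explicit SDK_INT guard makes the fallback behavior visible.
  boolean isVolumeFixed() {
    if (Build.VERSION.SDK_INT < 21) {
      return false; // AudioManager.isVolumeFixed() was added in API 21.
    }
    return audioManager.isVolumeFixed();
  }
}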

org/webrtc/voiceengine/WebRtcAudioEffects.java

@@ -10,7 +10,6 @@
 
 package org.webrtc.voiceengine;
 
-import android.annotation.TargetApi;
 import android.media.audiofx.AcousticEchoCanceler;
 import android.media.audiofx.AudioEffect;
 import android.media.audiofx.AudioEffect.Descriptor;
@@ -96,8 +95,9 @@ public class WebRtcAudioEffects {
 
   // Returns true if the platform AEC should be excluded based on its UUID.
   // AudioEffect.queryEffects() can throw IllegalStateException.
-  @TargetApi(18)
   private static boolean isAcousticEchoCancelerExcludedByUUID() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     for (Descriptor d : getAvailableEffects()) {
       if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC)
           && d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
@@ -109,8 +109,9 @@ public class WebRtcAudioEffects {
 
   // Returns true if the platform NS should be excluded based on its UUID.
   // AudioEffect.queryEffects() can throw IllegalStateException.
-  @TargetApi(18)
   private static boolean isNoiseSuppressorExcludedByUUID() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     for (Descriptor d : getAvailableEffects()) {
       if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
         return true;
@@ -120,14 +121,16 @@ public class WebRtcAudioEffects {
   }
 
   // Returns true if the device supports Acoustic Echo Cancellation (AEC).
-  @TargetApi(18)
   private static boolean isAcousticEchoCancelerEffectAvailable() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC);
   }
 
   // Returns true if the device supports Noise Suppression (NS).
-  @TargetApi(18)
   private static boolean isNoiseSuppressorEffectAvailable() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS);
   }
 
@@ -274,9 +277,8 @@ public class WebRtcAudioEffects {
   // AudioEffect.Descriptor array that are actually not available on the device.
   // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
   // AutomaticGainControl.isAvailable() returns false.
-  @TargetApi(18)
   private boolean effectTypeIsVoIP(UUID type) {
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
+    if (Build.VERSION.SDK_INT < 18)
       return false;
 
     return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())

org/webrtc/voiceengine/WebRtcAudioManager.java

@@ -10,7 +10,6 @@
 
 package org.webrtc.voiceengine;
 
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioFormat;
@@ -259,14 +258,13 @@ public class WebRtcAudioManager {
     // as well. The NDK doc states that: "As of API level 21, lower latency
     // audio input is supported on select devices. To take advantage of this
     // feature, first confirm that lower latency output is available".
-    return WebRtcAudioUtils.runningOnLollipopOrHigher() && isLowLatencyOutputSupported();
+    return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported();
   }
 
   // Returns true if the device has professional audio level of functionality
   // and therefore supports the lowest possible round-trip latency.
-  @TargetApi(23)
   private boolean isProAudioSupported() {
-    return WebRtcAudioUtils.runningOnMarshmallowOrHigher()
+    return Build.VERSION.SDK_INT >= 23
         && ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature(
                PackageManager.FEATURE_AUDIO_PRO);
   }
@@ -277,7 +275,7 @@ public class WebRtcAudioManager {
     if (blacklistDeviceForAAudioUsage) {
       Logging.w(TAG, "AAudio support is currently disabled on all devices!");
     }
-    return !blacklistDeviceForAAudioUsage && WebRtcAudioUtils.runningOnOreoMR1OrHigher();
+    return !blacklistDeviceForAAudioUsage && Build.VERSION.SDK_INT >= 27;
   }
 
   // Returns the native output sample rate for this device's output stream.
@@ -297,28 +295,24 @@ public class WebRtcAudioManager {
     }
     // No overrides available. Deliver best possible estimate based on default
     // Android AudioManager APIs.
-    final int sampleRateHz;
-    if (WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
-      sampleRateHz = getSampleRateOnJellyBeanMR10OrHigher();
-    } else {
-      sampleRateHz = WebRtcAudioUtils.getDefaultSampleRateHz();
-    }
+    final int sampleRateHz = getSampleRateForApiLevel();
     Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
     return sampleRateHz;
   }
 
-  @TargetApi(17)
-  private int getSampleRateOnJellyBeanMR10OrHigher() {
+  private int getSampleRateForApiLevel() {
+    if (Build.VERSION.SDK_INT < 17) {
+      return WebRtcAudioUtils.getDefaultSampleRateHz();
+    }
     String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
     return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz()
                                       : Integer.parseInt(sampleRateString);
   }
 
   // Returns the native output buffer size for low-latency output streams.
-  @TargetApi(17)
   private int getLowLatencyOutputFramesPerBuffer() {
     assertTrue(isLowLatencyOutputSupported());
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
+    if (Build.VERSION.SDK_INT < 17) {
       return DEFAULT_FRAME_PER_BUFFER;
     }
     String framesPerBuffer =

org/webrtc/voiceengine/WebRtcAudioRecord.java

@@ -10,10 +10,10 @@
 
 package org.webrtc.voiceengine;
 
-import android.annotation.TargetApi;
 import android.media.AudioFormat;
 import android.media.AudioRecord;
 import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
 import android.os.Process;
 import java.lang.System;
 import java.nio.ByteBuffer;
@@ -332,9 +332,8 @@ public class WebRtcAudioRecord {
         + "sample rate: " + audioRecord.getSampleRate());
   }
 
-  @TargetApi(23)
   private void logMainParametersExtended() {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 23) {
       Logging.d(TAG, "AudioRecord: "
           // The frame count of the native AudioRecord buffer.
          + "buffer size in frames: " + audioRecord.getBufferSizeInFrames());

org/webrtc/voiceengine/WebRtcAudioTrack.java

@@ -10,13 +10,13 @@
 
 package org.webrtc.voiceengine;
 
-import android.annotation.SuppressLint;
 import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioAttributes;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioTrack;
+import android.os.Build;
 import android.os.Process;
 import java.lang.Thread;
 import java.nio.ByteBuffer;
@@ -61,19 +61,14 @@ public class WebRtcAudioTrack {
   }
 
   private static int getDefaultUsageAttribute() {
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      return getDefaultUsageAttributeOnLollipopOrHigher();
+    if (Build.VERSION.SDK_INT >= 21) {
+      return AudioAttributes.USAGE_VOICE_COMMUNICATION;
     } else {
-      // Not used on SDKs lower than L.
+      // Not used on SDKs lower than 21.
       return 0;
     }
   }
 
-  @TargetApi(21)
-  private static int getDefaultUsageAttributeOnLollipopOrHigher() {
-    return AudioAttributes.USAGE_VOICE_COMMUNICATION;
-  }
-
   private final long nativeAudioTrack;
   private final AudioManager audioManager;
   private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
@@ -159,12 +154,7 @@ public class WebRtcAudioTrack {
           byteBuffer.put(emptyBytes);
          byteBuffer.position(0);
         }
-        int bytesWritten = 0;
-        if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-          bytesWritten = writeOnLollipop(audioTrack, byteBuffer, sizeInBytes);
-        } else {
-          bytesWritten = writePreLollipop(audioTrack, byteBuffer, sizeInBytes);
-        }
+        int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
         if (bytesWritten != sizeInBytes) {
           Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
           // If a write() returns a negative value, an error has occurred.
@@ -198,13 +188,12 @@ public class WebRtcAudioTrack {
       }
     }
 
-    @TargetApi(21)
-    private int writeOnLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
-      return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
-    }
-
-    private int writePreLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
-      return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+    private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      if (Build.VERSION.SDK_INT >= 21) {
+        return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+      } else {
+        return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+      }
     }
 
     // Stops the inner thread loop which results in calling AudioTrack.stop().
@@ -266,7 +255,7 @@ public class WebRtcAudioTrack {
     // Create an AudioTrack object and initialize its associated audio buffer.
     // The size of this buffer determines how long an AudioTrack can play
     // before running out of data.
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 21) {
       // If we are on API level 21 or higher, it is possible to use a special AudioTrack
       // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
       // supersede the notion of stream types for defining the behavior of audio playback,
@@ -370,11 +359,8 @@ public class WebRtcAudioTrack {
     return true;
   }
 
-  // TODO(bugs.webrtc.org/8580): Call requires API level 21 (current min is 16):
-  // `android.media.AudioManager#isVolumeFixed`: NewApi [warning]
-  @SuppressLint("NewApi")
   private boolean isVolumeFixed() {
-    if (!WebRtcAudioUtils.runningOnLollipopOrHigher())
+    if (Build.VERSION.SDK_INT < 21)
       return false;
     return audioManager.isVolumeFixed();
   }
@@ -437,29 +423,36 @@ public class WebRtcAudioTrack {
         AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
   }
 
-  @TargetApi(24)
-  private void logMainParametersExtended() {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+  private void logBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= 23) {
       Logging.d(TAG, "AudioTrack: "
           // The effective size of the AudioTrack buffer that the app writes to.
           + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
     }
-    if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
-      Logging.d(TAG, "AudioTrack: "
+  }
+
+  private void logBufferCapacityInFrames() {
+    if (Build.VERSION.SDK_INT >= 24) {
+      Logging.d(TAG,
+          "AudioTrack: "
          // Maximum size of the AudioTrack buffer in frames.
          + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames());
     }
   }
 
+  private void logMainParametersExtended() {
+    logBufferSizeInFrames();
+    logBufferCapacityInFrames();
+  }
+
   // Prints the number of underrun occurrences in the application-level write
   // buffer since the AudioTrack was created. An underrun occurs if the app does
   // not write audio data quickly enough, causing the buffer to underflow and a
   // potential audio glitch.
   // TODO(henrika): keep track of this value in the field and possibly add new
   // UMA stat if needed.
-  @TargetApi(24)
   private void logUnderrunCount() {
-    if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 24) {
       Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
     }
   }
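
The two @TargetApi(21) write paths collapse into a single guarded writeBytes() because the ByteBuffer overload of AudioTrack.write() only exists from API 21. A standalone sketch of the same dispatch, with a hypothetical wrapper class for illustration; it assumes the buffer was created with ByteBuffer.allocate() so array() is available in the fallback path:

import android.media.AudioTrack;
import android.os.Build;
import java.nio.ByteBuffer;

final class TrackWriter {
  // Mirrors the writeBytes() shape from the diff: one runtime-guarded method
  // instead of writeOnLollipop()/writePreLollipop() behind @TargetApi(21).
  static int write(AudioTrack track, ByteBuffer buffer, int sizeInBytes) {
    if (Build.VERSION.SDK_INT >= 21) {
      // Blocking ByteBuffer write; this overload was added in API 21.
      return track.write(buffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
    }
    // Pre-21 fallback: write from the buffer's backing array.
    return track.write(buffer.array(), buffer.arrayOffset(), sizeInBytes);
  }
}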

org/webrtc/voiceengine/WebRtcAudioUtils.java

@@ -15,8 +15,6 @@ import static android.media.AudioManager.MODE_IN_COMMUNICATION;
 import static android.media.AudioManager.MODE_NORMAL;
 import static android.media.AudioManager.MODE_RINGTONE;
 
-import android.annotation.SuppressLint;
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioDeviceInfo;
@@ -166,41 +164,6 @@ public final class WebRtcAudioUtils {
     return Arrays.asList(WebRtcAudioUtils.BLACKLISTED_NS_MODELS);
   }
 
-  public static boolean runningOnJellyBeanMR1OrHigher() {
-    // November 2012: Android 4.2. API Level 17.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
-  }
-
-  public static boolean runningOnJellyBeanMR2OrHigher() {
-    // July 24, 2013: Android 4.3. API Level 18.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2;
-  }
-
-  public static boolean runningOnLollipopOrHigher() {
-    // API Level 21.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
-  }
-
-  public static boolean runningOnMarshmallowOrHigher() {
-    // API Level 23.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.M;
-  }
-
-  public static boolean runningOnNougatOrHigher() {
-    // API Level 24.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
-  }
-
-  public static boolean runningOnOreoOrHigher() {
-    // API Level 26.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
-  }
-
-  public static boolean runningOnOreoMR1OrHigher() {
-    // API Level 27.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1;
-  }
-
   // Helper method for building a string of thread information.
   public static String getThreadInfo() {
     return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
@@ -255,9 +218,13 @@ public final class WebRtcAudioUtils {
         + "BT SCO: " + audioManager.isBluetoothScoOn());
   }
 
-  // TODO(bugs.webrtc.org/8580): Call requires API level 21 (current min is 16):
-  // `android.media.AudioManager#isVolumeFixed`: NewApi [warning]
-  @SuppressLint("NewApi")
+  private static boolean isVolumeFixed(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 21) {
+      return false;
+    }
+    return audioManager.isVolumeFixed();
+  }
+
   // Adds volume information for all possible stream types.
   private static void logAudioStateVolume(String tag, AudioManager audioManager) {
     final int[] streams = {
@@ -269,12 +236,9 @@ public final class WebRtcAudioUtils {
         AudioManager.STREAM_SYSTEM
     };
     Logging.d(tag, "Audio State: ");
-    boolean fixedVolume = false;
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      fixedVolume = audioManager.isVolumeFixed();
-      // Some devices may not have volume controls and might use a fixed volume.
-      Logging.d(tag, " fixed volume=" + fixedVolume);
-    }
+    // Some devices may not have volume controls and might use a fixed volume.
+    boolean fixedVolume = isVolumeFixed(audioManager);
+    Logging.d(tag, " fixed volume=" + fixedVolume);
     if (!fixedVolume) {
       for (int stream : streams) {
         StringBuilder info = new StringBuilder();
@@ -287,17 +251,15 @@ public final class WebRtcAudioUtils {
     }
   }
 
-  @TargetApi(23)
   private static void logIsStreamMute(
       String tag, AudioManager audioManager, int stream, StringBuilder info) {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 23) {
       info.append(", muted=").append(audioManager.isStreamMute(stream));
     }
   }
 
-  @TargetApi(23)
   private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
-    if (!WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT < 23) {
       return;
     }
     final AudioDeviceInfo[] devices =
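
The commit message proposes renaming the version-name helpers to API-number names; in the files shown in this excerpt the helpers are instead deleted outright in favor of inline SDK_INT comparisons. For illustration only, a sketch of what an API-number-named helper would look like under that naming scheme (this class and method are hypothetical, not part of the commit):

import android.os.Build;

final class ApiLevelChecks {
  // Named after the API number, as the commit message suggests, so a reader
  // never has to remember that "Lollipop" means API level 21.
  static boolean runningOnApi21OrHigher() {
    return Build.VERSION.SDK_INT >= 21;
  }
}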

org/webrtc/audio/WebRtcAudioEffects.java

@@ -10,7 +10,6 @@
 
 package org.webrtc.audio;
 
-import android.annotation.TargetApi;
 import android.media.audiofx.AcousticEchoCanceler;
 import android.media.audiofx.AudioEffect;
 import android.media.audiofx.AudioEffect.Descriptor;
@@ -56,14 +55,16 @@ class WebRtcAudioEffects {
 
   // Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
   // fulfilled.
-  @TargetApi(18)
   public static boolean isAcousticEchoCancelerSupported() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER);
   }
 
   // Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled.
-  @TargetApi(18)
   public static boolean isNoiseSuppressorSupported() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
     return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR);
   }
 
@@ -188,9 +189,8 @@ class WebRtcAudioEffects {
   // AudioEffect.Descriptor array that are actually not available on the device.
   // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
   // AutomaticGainControl.isAvailable() returns false.
-  @TargetApi(18)
   private boolean effectTypeIsVoIP(UUID type) {
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
+    if (Build.VERSION.SDK_INT < 18)
      return false;
 
     return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
@@ -221,7 +221,6 @@ class WebRtcAudioEffects {
   // Returns true if an effect of the specified type is available. Functionally
   // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but
   // faster as it avoids the expensive OS call to enumerate effects.
-  @TargetApi(18)
   private static boolean isEffectTypeAvailable(UUID effectType, UUID blackListedUuid) {
     Descriptor[] effects = getAvailableEffects();
     if (effects == null) {
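
With the runtime guard embedded in isAcousticEchoCancelerSupported() itself, callers can query support unconditionally on any API level; pre-18 devices simply report the effect as unsupported. A minimal caller sketch (the wrapper class here is illustrative, not part of the commit):

class EffectsProbe {
  // isAcousticEchoCancelerSupported() now carries its own SDK_INT < 18 check
  // (see the hunks above), so this call needs no annotation or suppression.
  static boolean shouldUseHardwareAec() {
    return WebRtcAudioEffects.isAcousticEchoCancelerSupported();
  }
}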

org/webrtc/audio/WebRtcAudioManager.java

@@ -10,13 +10,13 @@
 
 package org.webrtc.audio;
 
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioRecord;
 import android.media.AudioTrack;
+import android.os.Build;
 import org.webrtc.Logging;
 import org.webrtc.CalledByNative;
 
@@ -64,7 +64,7 @@ class WebRtcAudioManager {
     // as well. The NDK doc states that: "As of API level 21, lower latency
     // audio input is supported on select devices. To take advantage of this
     // feature, first confirm that lower latency output is available".
-    return WebRtcAudioUtils.runningOnLollipopOrHigher() && isLowLatencyOutputSupported(context);
+    return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported(context);
   }
 
   /**
@@ -79,23 +79,22 @@ class WebRtcAudioManager {
       return 8000;
     }
     // Deliver best possible estimate based on default Android AudioManager APIs.
-    final int sampleRateHz = WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()
-        ? getSampleRateOnJellyBeanMR10OrHigher(audioManager)
-        : DEFAULT_SAMPLE_RATE_HZ;
+    final int sampleRateHz = getSampleRateForApiLevel(audioManager);
     Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
     return sampleRateHz;
   }
 
-  @TargetApi(17)
-  private static int getSampleRateOnJellyBeanMR10OrHigher(AudioManager audioManager) {
+  private static int getSampleRateForApiLevel(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 17) {
+      return DEFAULT_SAMPLE_RATE_HZ;
+    }
     String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
     return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
   }
 
   // Returns the native output buffer size for low-latency output streams.
-  @TargetApi(17)
   private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
+    if (Build.VERSION.SDK_INT < 17) {
       return DEFAULT_FRAME_PER_BUFFER;
     }
     String framesPerBuffer =

org/webrtc/audio/WebRtcAudioRecord.java

@@ -10,12 +10,12 @@
 
 package org.webrtc.audio;
 
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioRecord;
 import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
 import android.os.Process;
 import java.lang.System;
 import java.nio.ByteBuffer;
@@ -299,9 +299,8 @@ class WebRtcAudioRecord {
         + "sample rate: " + audioRecord.getSampleRate());
   }
 
-  @TargetApi(23)
   private void logMainParametersExtended() {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 23) {
       Logging.d(TAG,
           "AudioRecord: "
          // The frame count of the native AudioRecord buffer.

org/webrtc/audio/WebRtcAudioTrack.java

@@ -10,13 +10,13 @@
 
 package org.webrtc.audio;
 
-import android.annotation.SuppressLint;
 import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioAttributes;
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioTrack;
+import android.os.Build;
 import android.os.Process;
 import java.lang.Thread;
 import java.nio.ByteBuffer;
@@ -49,19 +49,14 @@ class WebRtcAudioTrack {
   private static final int DEFAULT_USAGE = getDefaultUsageAttribute();
 
   private static int getDefaultUsageAttribute() {
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      return getDefaultUsageAttributeOnLollipopOrHigher();
+    if (Build.VERSION.SDK_INT >= 21) {
+      return AudioAttributes.USAGE_VOICE_COMMUNICATION;
     } else {
       // Not used on SDKs lower than L.
       return 0;
     }
   }
 
-  @TargetApi(21)
-  private static int getDefaultUsageAttributeOnLollipopOrHigher() {
-    return AudioAttributes.USAGE_VOICE_COMMUNICATION;
-  }
-
   private long nativeAudioTrack;
   private final Context context;
   private final AudioManager audioManager;
@@ -117,12 +112,7 @@ class WebRtcAudioTrack {
           byteBuffer.put(emptyBytes);
           byteBuffer.position(0);
         }
-        int bytesWritten = 0;
-        if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-          bytesWritten = writeOnLollipop(audioTrack, byteBuffer, sizeInBytes);
-        } else {
-          bytesWritten = writePreLollipop(audioTrack, byteBuffer, sizeInBytes);
-        }
+        int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
         if (bytesWritten != sizeInBytes) {
           Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
           // If a write() returns a negative value, an error has occurred.
@@ -156,13 +146,12 @@ class WebRtcAudioTrack {
       }
     }
 
-    @TargetApi(21)
-    private int writeOnLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
-      return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
-    }
-
-    private int writePreLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
-      return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+    private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      if (Build.VERSION.SDK_INT >= 21) {
+        return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+      } else {
+        return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+      }
     }
 
     // Stops the inner thread loop which results in calling AudioTrack.stop().
@@ -233,7 +222,7 @@ class WebRtcAudioTrack {
     // Create an AudioTrack object and initialize its associated audio buffer.
     // The size of this buffer determines how long an AudioTrack can play
     // before running out of data.
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 21) {
       // If we are on API level 21 or higher, it is possible to use a special AudioTrack
       // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
       // supersede the notion of stream types for defining the behavior of audio playback,
@@ -339,11 +328,8 @@ class WebRtcAudioTrack {
     return true;
   }
 
-  // TODO(bugs.webrtc.org/8580): Call requires API level 21 (current min is 16):
-  // `android.media.AudioManager#isVolumeFixed`: NewApi [warning]
-  @SuppressLint("NewApi")
   private boolean isVolumeFixed() {
-    if (!WebRtcAudioUtils.runningOnLollipopOrHigher())
+    if (Build.VERSION.SDK_INT < 21)
       return false;
     return audioManager.isVolumeFixed();
   }
@@ -402,15 +388,17 @@ class WebRtcAudioTrack {
         AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
   }
 
-  @TargetApi(24)
-  private void logMainParametersExtended() {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+  private void logBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= 23) {
       Logging.d(TAG,
           "AudioTrack: "
          // The effective size of the AudioTrack buffer that the app writes to.
          + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
     }
-    if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
+  }
+
+  private void logBufferCapacityInFrames() {
+    if (Build.VERSION.SDK_INT >= 24) {
       Logging.d(TAG,
           "AudioTrack: "
          // Maximum size of the AudioTrack buffer in frames.
@@ -418,15 +406,19 @@ class WebRtcAudioTrack {
     }
   }
 
+  private void logMainParametersExtended() {
+    logBufferSizeInFrames();
+    logBufferCapacityInFrames();
+  }
+
   // Prints the number of underrun occurrences in the application-level write
   // buffer since the AudioTrack was created. An underrun occurs if the app does
   // not write audio data quickly enough, causing the buffer to underflow and a
   // potential audio glitch.
   // TODO(henrika): keep track of this value in the field and possibly add new
   // UMA stat if needed.
-  @TargetApi(24)
   private void logUnderrunCount() {
-    if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 24) {
       Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
     }
   }

org/webrtc/audio/WebRtcAudioUtils.java

@@ -15,8 +15,6 @@ import static android.media.AudioManager.MODE_IN_COMMUNICATION;
 import static android.media.AudioManager.MODE_NORMAL;
 import static android.media.AudioManager.MODE_RINGTONE;
 
-import android.annotation.SuppressLint;
-import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioDeviceInfo;
@@ -34,41 +32,6 @@ import org.webrtc.Logging;
 final class WebRtcAudioUtils {
   private static final String TAG = "WebRtcAudioUtilsExternal";
 
-  public static boolean runningOnJellyBeanMR1OrHigher() {
-    // November 2012: Android 4.2. API Level 17.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
-  }
-
-  public static boolean runningOnJellyBeanMR2OrHigher() {
-    // July 24, 2013: Android 4.3. API Level 18.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2;
-  }
-
-  public static boolean runningOnLollipopOrHigher() {
-    // API Level 21.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
-  }
-
-  public static boolean runningOnMarshmallowOrHigher() {
-    // API Level 23.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.M;
-  }
-
-  public static boolean runningOnNougatOrHigher() {
-    // API Level 24.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
-  }
-
-  public static boolean runningOnOreoOrHigher() {
-    // API Level 26.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
-  }
-
-  public static boolean runningOnOreoMR1OrHigher() {
-    // API Level 27.
-    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1;
-  }
-
   // Helper method for building a string of thread information.
   public static String getThreadInfo() {
     return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
@@ -116,21 +79,22 @@ final class WebRtcAudioUtils {
         + "BT SCO: " + audioManager.isBluetoothScoOn());
   }
 
-  // TODO(bugs.webrtc.org/8580): Call requires API level 21 (current min is 16):
-  // `android.media.AudioManager#isVolumeFixed`: NewApi [warning]
-  @SuppressLint("NewApi")
+  private static boolean isVolumeFixed(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 21) {
+      return false;
+    }
+    return audioManager.isVolumeFixed();
+  }
+
   // Adds volume information for all possible stream types.
   private static void logAudioStateVolume(String tag, AudioManager audioManager) {
     final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
         AudioManager.STREAM_RING, AudioManager.STREAM_ALARM, AudioManager.STREAM_NOTIFICATION,
         AudioManager.STREAM_SYSTEM};
     Logging.d(tag, "Audio State: ");
-    boolean fixedVolume = false;
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      fixedVolume = audioManager.isVolumeFixed();
-      // Some devices may not have volume controls and might use a fixed volume.
-      Logging.d(tag, " fixed volume=" + fixedVolume);
-    }
+    // Some devices may not have volume controls and might use a fixed volume.
+    boolean fixedVolume = isVolumeFixed(audioManager);
+    Logging.d(tag, " fixed volume=" + fixedVolume);
     if (!fixedVolume) {
       for (int stream : streams) {
         StringBuilder info = new StringBuilder();
@@ -143,17 +107,15 @@ final class WebRtcAudioUtils {
     }
   }
 
-  @TargetApi(23)
   private static void logIsStreamMute(
       String tag, AudioManager audioManager, int stream, StringBuilder info) {
-    if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT >= 23) {
       info.append(", muted=").append(audioManager.isStreamMute(stream));
     }
   }
 
-  @TargetApi(23)
   private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
-    if (!WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
+    if (Build.VERSION.SDK_INT < 23) {
       return;
     }
     final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);