Use low latency mode on Android O and later.

This CL makes it possible to use a low-latency mode on Android O and later, which should help reduce audio latency. The feature is disabled by default and must be enabled when creating the audio device module.

Bug: webrtc:12284
Change-Id: Idf41146aa0bc1206e9a2e28e4101d85c3e4eaefc
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/196741
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Commit-Queue: Ivo Creusen <ivoc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#32854}
Ivo Creusen, 2020-12-16 14:28:03 +01:00 (committed by Commit Bot)
commit c25a3a3a1e (parent 4a541f15dd)
6 changed files with 293 additions and 17 deletions
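
For reference, a minimal sketch of how the feature is meant to be switched on when creating the audio device module. The setUseLowLatency() builder method is an assumption based on this CL's JavaAudioDeviceModule changes, which are among the changed files not shown in the excerpt below.

import android.content.Context;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;

final class LowLatencyAdmFactory {
  // Sketch only: setUseLowLatency() is assumed from this CL and is not
  // confirmed by the excerpt below. The feature stays disabled unless
  // explicitly enabled here.
  static AudioDeviceModule create(Context appContext) {
    return JavaAudioDeviceModule.builder(appContext)
        .setUseLowLatency(true)
        .createAudioDeviceModule();
  }
}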

@@ -0,0 +1,81 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.audio;
import android.media.AudioTrack;
import android.os.Build;
import org.webrtc.Logging;
// Lowers the buffer size if no underruns are detected for 100 ms. Once an
// underrun is detected, the buffer size is increased by 10 ms and it will not
// be lowered further. The buffer size will never be increased more than
// 5 times, to avoid the possibility of the buffer size increasing without
// bounds.
class LowLatencyAudioBufferManager {
private static final String TAG = "LowLatencyAudioBufferManager";
// The underrun count that was valid during the previous call to maybeAdjustBufferSize(). Used to
// detect increases in the value.
private int prevUnderrunCount;
// The number of ticks to wait without an underrun before decreasing the buffer size.
private int ticksUntilNextDecrease;
// Indicates whether we should continue to decrease the buffer size.
private boolean keepLoweringBufferSize;
// The number of times the buffer size has been increased.
private int bufferIncreaseCounter;
public LowLatencyAudioBufferManager() {
this.prevUnderrunCount = 0;
this.ticksUntilNextDecrease = 10;
this.keepLoweringBufferSize = true;
this.bufferIncreaseCounter = 0;
}
public void maybeAdjustBufferSize(AudioTrack audioTrack) {
if (Build.VERSION.SDK_INT >= 26) {
final int underrunCount = audioTrack.getUnderrunCount();
if (underrunCount > prevUnderrunCount) {
// Don't increase buffer more than 5 times. Continuing to increase the buffer size
// could be harmful on low-power devices that regularly experience underruns under
// normal conditions.
if (bufferIncreaseCounter < 5) {
// Underrun detected, increase buffer size by 10ms.
final int currentBufferSize = audioTrack.getBufferSizeInFrames();
final int newBufferSize = currentBufferSize + audioTrack.getPlaybackRate() / 100;
Logging.d(TAG,
"Underrun detected! Increasing AudioTrack buffer size from " + currentBufferSize
+ " to " + newBufferSize);
audioTrack.setBufferSizeInFrames(newBufferSize);
bufferIncreaseCounter++;
}
// Stop trying to lower the buffer size.
keepLoweringBufferSize = false;
prevUnderrunCount = underrunCount;
ticksUntilNextDecrease = 10;
} else if (keepLoweringBufferSize) {
ticksUntilNextDecrease--;
if (ticksUntilNextDecrease <= 0) {
// No underrun seen for 100 ms, try to lower the buffer size by 10ms.
final int bufferSize10ms = audioTrack.getPlaybackRate() / 100;
// Never go below a buffer size of 10ms.
final int currentBufferSize = audioTrack.getBufferSizeInFrames();
final int newBufferSize = Math.max(bufferSize10ms, currentBufferSize - bufferSize10ms);
if (newBufferSize != currentBufferSize) {
Logging.d(TAG,
"Lowering AudioTrack buffer size from " + currentBufferSize + " to "
+ newBufferSize);
audioTrack.setBufferSizeInFrames(newBufferSize);
}
ticksUntilNextDecrease = 10;
}
}
}
}
}
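
A short sketch of how the adjustment policy above can be exercised with a mocked AudioTrack. The scaffolding (Mockito, plus a Robolectric-style environment where Build.VERSION.SDK_INT reports 26 or higher so the API level check passes) is an assumption, not part of this CL's diff.

package org.webrtc.audio;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import android.media.AudioTrack;
import org.junit.Test;

public class LowLatencyAudioBufferManagerSketchTest {
  @Test
  public void underrunGrowsBufferByTenMilliseconds() {
    AudioTrack track = mock(AudioTrack.class);
    when(track.getUnderrunCount()).thenReturn(1); // One new underrun.
    when(track.getBufferSizeInFrames()).thenReturn(480);
    when(track.getPlaybackRate()).thenReturn(48000); // 48 kHz playout.

    new LowLatencyAudioBufferManager().maybeAdjustBufferSize(track);

    // 48000 / 100 = 480 frames is 10 ms at 48 kHz, so 480 grows to 960.
    verify(track).setBufferSizeInFrames(960);
  }
}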

@@ -19,7 +19,6 @@ import android.media.AudioTrack;
import android.os.Build;
import android.os.Process;
import android.support.annotation.Nullable;
import java.lang.Thread;
import java.nio.ByteBuffer;
import org.webrtc.CalledByNative;
import org.webrtc.Logging;
@@ -27,6 +26,7 @@ import org.webrtc.ThreadUtils;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;
import org.webrtc.audio.LowLatencyAudioBufferManager;
class WebRtcAudioTrack {
private static final String TAG = "WebRtcAudioTrackExternal";
@@ -80,6 +80,8 @@ class WebRtcAudioTrack {
// Can be used to ensure that the speaker is fully muted.
private volatile boolean speakerMute;
private byte[] emptyBytes;
private boolean useLowLatency;
private int initialBufferSizeInFrames;
private final @Nullable AudioTrackErrorCallback errorCallback;
private final @Nullable AudioTrackStateCallback stateCallback;
@@ -92,9 +94,11 @@
*/
private class AudioTrackThread extends Thread {
private volatile boolean keepAlive = true;
private LowLatencyAudioBufferManager bufferManager;
public AudioTrackThread(String name) {
super(name);
bufferManager = new LowLatencyAudioBufferManager();
}
@Override
@@ -134,6 +138,9 @@
reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten);
}
}
if (useLowLatency) {
bufferManager.maybeAdjustBufferSize(audioTrack);
}
// The byte buffer must be rewound since byteBuffer.position() is
// increased at each call to AudioTrack.write(). If we don't do this,
// the next call to AudioTrack.write() will fail.
@@ -164,12 +171,12 @@
@CalledByNative
WebRtcAudioTrack(Context context, AudioManager audioManager) {
this(context, audioManager, null /* audioAttributes */, null /* errorCallback */,
null /* stateCallback */);
null /* stateCallback */, false /* useLowLatency */);
}
WebRtcAudioTrack(Context context, AudioManager audioManager,
@Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback,
@Nullable AudioTrackStateCallback stateCallback) {
@Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency) {
threadChecker.detachThread();
this.context = context;
this.audioManager = audioManager;
@@ -177,6 +184,7 @@
this.errorCallback = errorCallback;
this.stateCallback = stateCallback;
this.volumeLogger = new VolumeLogger(audioManager);
this.useLowLatency = useLowLatency;
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
}
@@ -218,6 +226,13 @@
return -1;
}
// Don't use low-latency mode when a bufferSizeFactor > 1 is used. When bufferSizeFactor > 1
// we want a larger buffer to prevent underruns. However, low-latency mode would decrease
// the buffer size, which would negate the effect of the bufferSizeFactor.
if (bufferSizeFactor > 1.0) {
useLowLatency = false;
}
// Ensure that the previous audio session was stopped correctly before trying
// to create a new AudioTrack.
if (audioTrack != null) {
@@ -228,7 +243,11 @@
// Create an AudioTrack object and initialize its associated audio buffer.
// The size of this buffer determines how long an AudioTrack can play
// before running out of data.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
if (useLowLatency && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
// On API level 26 or higher, we can use low-latency mode.
audioTrack = createAudioTrackOnOreoOrHigher(
sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// If we are on API level 21 or higher, it is possible to use a special AudioTrack
// constructor that uses AudioAttributes and AudioFormat as input. It allows us to
// supersede the notion of stream types for defining the behavior of audio playback,
@@ -255,6 +274,11 @@
releaseAudioResources();
return -1;
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
initialBufferSizeInFrames = audioTrack.getBufferSizeInFrames();
} else {
initialBufferSizeInFrames = -1;
}
logMainParameters();
logMainParametersExtended();
return minBufferSizeInBytes;
@@ -382,22 +406,16 @@
+ "max gain: " + AudioTrack.getMaxVolume());
}
// Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
// It allows certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz,
int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
// TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
// performance when Android O is supported. Add some logging in the meantime.
private static void logNativeOutputSampleRate(int requestedSampleRateInHz) {
final int nativeOutputSampleRate =
AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
if (sampleRateInHz != nativeOutputSampleRate) {
if (requestedSampleRateInHz != nativeOutputSampleRate) {
Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
}
}
private static AudioAttributes getAudioAttributes(@Nullable AudioAttributes overrideAttributes) {
AudioAttributes.Builder attributesBuilder =
new AudioAttributes.Builder()
.setUsage(DEFAULT_USAGE)
@@ -417,9 +435,20 @@
attributesBuilder = applyAttributesOnQOrHigher(attributesBuilder, overrideAttributes);
}
}
return attributesBuilder.build();
}
// Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
// It allows certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz,
int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
logNativeOutputSampleRate(sampleRateInHz);
// Create an audio track where the audio usage is for VoIP and the content type is speech.
return new AudioTrack(attributesBuilder.build(),
return new AudioTrack(getAudioAttributes(overrideAttributes),
new AudioFormat.Builder()
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.setSampleRate(sampleRateInHz)
@@ -428,6 +457,32 @@
bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
}
// Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
// Uses the low-latency mode to reduce audio latency. Note that the low-latency mode may
// prevent effects (such as AEC) from working. Even when AEC is working, the delay changes
// that happen in low-latency mode during the call may cause it to perform worse.
// The behavior of the low-latency mode may be device dependent; use at your own risk.
@TargetApi(Build.VERSION_CODES.O)
private static AudioTrack createAudioTrackOnOreoOrHigher(int sampleRateInHz, int channelConfig,
int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
Logging.d(TAG, "createAudioTrackOnOreoOrHigher");
logNativeOutputSampleRate(sampleRateInHz);
// Create an audio track where the audio usage is for VoIP and the content type is speech.
return new AudioTrack.Builder()
.setAudioAttributes(getAudioAttributes(overrideAttributes))
.setAudioFormat(new AudioFormat.Builder()
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.setSampleRate(sampleRateInHz)
.setChannelMask(channelConfig)
.build())
.setBufferSizeInBytes(bufferSizeInBytes)
.setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY)
.setTransferMode(AudioTrack.MODE_STREAM)
.setSessionId(AudioManager.AUDIO_SESSION_ID_GENERATE)
.build();
}
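// A small sketch, not part of this CL: the platform may silently decline the
// low-latency request (for example when the requested sample rate is not the
// native one, as logged above), so on API 26+ the granted mode can be checked.
@TargetApi(Build.VERSION_CODES.O)
private static void logGrantedPerformanceMode(AudioTrack audioTrack) {
  if (audioTrack.getPerformanceMode() != AudioTrack.PERFORMANCE_MODE_LOW_LATENCY) {
    Logging.w(TAG, "Low-latency performance mode requested but not granted.");
  }
}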
@TargetApi(Build.VERSION_CODES.Q)
private static AudioAttributes.Builder applyAttributesOnQOrHigher(
AudioAttributes.Builder builder, AudioAttributes overrideAttributes) {
@@ -458,6 +513,11 @@
return -1;
}
@CalledByNative
private int getInitialBufferSizeInFrames() {
return initialBufferSizeInFrames;
}
private void logBufferCapacityInFrames() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
Logging.d(TAG,