Allow clients to provide custom scheduler to AudioModule

Bug: None
Change-Id: Ie80f84c64a43e957d7f8c4b61ac2f1495d292b50
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/184300
Commit-Queue: Bin Zhu <ricebin@google.com>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Henrik Andersson <henrika@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#32201}
This commit is contained in:
Bin Zhu
2020-09-25 09:14:56 -07:00
committed by Commit Bot
parent e5d669ed28
commit 66515d6676
2 changed files with 66 additions and 32 deletions

View File

@ -15,6 +15,7 @@ import android.media.AudioDeviceInfo;
import android.media.AudioManager; import android.media.AudioManager;
import android.os.Build; import android.os.Build;
import android.support.annotation.RequiresApi; import android.support.annotation.RequiresApi;
import java.util.concurrent.ScheduledExecutorService;
import org.webrtc.JniCommon; import org.webrtc.JniCommon;
import org.webrtc.Logging; import org.webrtc.Logging;
@ -31,6 +32,7 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
public static class Builder { public static class Builder {
private final Context context; private final Context context;
private ScheduledExecutorService scheduler;
private final AudioManager audioManager; private final AudioManager audioManager;
private int inputSampleRate; private int inputSampleRate;
private int outputSampleRate; private int outputSampleRate;
@ -53,6 +55,11 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager); this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
} }
public Builder setScheduler(ScheduledExecutorService scheduler) {
this.scheduler = scheduler;
return this;
}
/** /**
* Call this method if the default handling of querying the native sample rate shall be * Call this method if the default handling of querying the native sample rate shall be
* overridden. Can be useful on some devices where the available Android APIs are known to * overridden. Can be useful on some devices where the available Android APIs are known to
@ -208,9 +215,13 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
} }
Logging.d(TAG, "HW AEC will not be used."); Logging.d(TAG, "HW AEC will not be used.");
} }
final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, audioManager, audioSource, ScheduledExecutorService executor = this.scheduler;
audioFormat, audioRecordErrorCallback, audioRecordStateCallback, samplesReadyCallback, if (executor == null) {
useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); executor = WebRtcAudioRecord.newDefaultScheduler();
}
final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager,
audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack( final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(
context, audioManager, audioTrackErrorCallback, audioTrackStateCallback); context, audioManager, audioTrackErrorCallback, audioTrackStateCallback);
return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput, return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,

View File

@ -31,7 +31,10 @@ import java.util.concurrent.Callable;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.webrtc.CalledByNative; import org.webrtc.CalledByNative;
import org.webrtc.Logging; import org.webrtc.Logging;
import org.webrtc.ThreadUtils; import org.webrtc.ThreadUtils;
@ -90,12 +93,12 @@ class WebRtcAudioRecord {
private @Nullable AudioRecordThread audioThread; private @Nullable AudioRecordThread audioThread;
private @Nullable AudioDeviceInfo preferredDevice; private @Nullable AudioDeviceInfo preferredDevice;
private @Nullable ScheduledExecutorService executor; private final ScheduledExecutorService executor;
private @Nullable ScheduledFuture<String> future; private @Nullable ScheduledFuture<String> future;
private volatile boolean microphoneMute; private volatile boolean microphoneMute;
private boolean audioSourceMatchesRecordingSession; private final AtomicReference<Boolean> audioSourceMatchesRecordingSessionRef =
private boolean isAudioConfigVerified; new AtomicReference<>();
private byte[] emptyBytes; private byte[] emptyBytes;
private final @Nullable AudioRecordErrorCallback errorCallback; private final @Nullable AudioRecordErrorCallback errorCallback;
@ -179,14 +182,15 @@ class WebRtcAudioRecord {
@CalledByNative @CalledByNative
WebRtcAudioRecord(Context context, AudioManager audioManager) { WebRtcAudioRecord(Context context, AudioManager audioManager) {
this(context, audioManager, DEFAULT_AUDIO_SOURCE, DEFAULT_AUDIO_FORMAT, this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE,
null /* errorCallback */, null /* stateCallback */, null /* audioSamplesReadyCallback */, DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */,
WebRtcAudioEffects.isAcousticEchoCancelerSupported(), null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(),
WebRtcAudioEffects.isNoiseSuppressorSupported()); WebRtcAudioEffects.isNoiseSuppressorSupported());
} }
public WebRtcAudioRecord(Context context, AudioManager audioManager, int audioSource, public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler,
int audioFormat, @Nullable AudioRecordErrorCallback errorCallback, AudioManager audioManager, int audioSource, int audioFormat,
@Nullable AudioRecordErrorCallback errorCallback,
@Nullable AudioRecordStateCallback stateCallback, @Nullable AudioRecordStateCallback stateCallback,
@Nullable SamplesReadyCallback audioSamplesReadyCallback, @Nullable SamplesReadyCallback audioSamplesReadyCallback,
boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) {
@ -197,6 +201,7 @@ class WebRtcAudioRecord {
throw new IllegalArgumentException("HW NS not supported"); throw new IllegalArgumentException("HW NS not supported");
} }
this.context = context; this.context = context;
this.executor = scheduler;
this.audioManager = audioManager; this.audioManager = audioManager;
this.audioSource = audioSource; this.audioSource = audioSource;
this.audioFormat = audioFormat; this.audioFormat = audioFormat;
@ -227,7 +232,7 @@ class WebRtcAudioRecord {
// checked before using the returned value of isAudioSourceMatchingRecordingSession(). // checked before using the returned value of isAudioSourceMatchingRecordingSession().
@CalledByNative @CalledByNative
boolean isAudioConfigVerified() { boolean isAudioConfigVerified() {
return isAudioConfigVerified; return audioSourceMatchesRecordingSessionRef.get() != null;
} }
// Returns true if verifyAudioConfig() succeeds. This value is set after a specific delay when // Returns true if verifyAudioConfig() succeeds. This value is set after a specific delay when
@ -236,7 +241,8 @@ class WebRtcAudioRecord {
// enabled in WebRtcAudioRecord to ensure that the returned value is valid. // enabled in WebRtcAudioRecord to ensure that the returned value is valid.
@CalledByNative @CalledByNative
boolean isAudioSourceMatchingRecordingSession() { boolean isAudioSourceMatchingRecordingSession() {
if (!isAudioConfigVerified) { Boolean audioSourceMatchesRecordingSession = audioSourceMatchesRecordingSessionRef.get();
if (audioSourceMatchesRecordingSession == null) {
Logging.w(TAG, "Audio configuration has not yet been verified"); Logging.w(TAG, "Audio configuration has not yet been verified");
return false; return false;
} }
@ -298,6 +304,7 @@ class WebRtcAudioRecord {
// Throws IllegalArgumentException. // Throws IllegalArgumentException.
audioRecord = createAudioRecordOnMOrHigher( audioRecord = createAudioRecordOnMOrHigher(
audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
audioSourceMatchesRecordingSessionRef.set(null);
if (preferredDevice != null) { if (preferredDevice != null) {
setPreferredDevice(preferredDevice); setPreferredDevice(preferredDevice);
} }
@ -306,6 +313,7 @@ class WebRtcAudioRecord {
// Throws UnsupportedOperationException. // Throws UnsupportedOperationException.
audioRecord = createAudioRecordOnLowerThanM( audioRecord = createAudioRecordOnLowerThanM(
audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
audioSourceMatchesRecordingSessionRef.set(null);
} }
} catch (IllegalArgumentException | UnsupportedOperationException e) { } catch (IllegalArgumentException | UnsupportedOperationException e) {
// Report of exception message is sufficient. Example: "Cannot create AudioRecord". // Report of exception message is sufficient. Example: "Cannot create AudioRecord".
@ -324,7 +332,7 @@ class WebRtcAudioRecord {
// Check number of active recording sessions. Should be zero but we have seen conflict cases // Check number of active recording sessions. Should be zero but we have seen conflict cases
// and adding a log for it can help us figure out details about conflicting sessions. // and adding a log for it can help us figure out details about conflicting sessions.
final int numActiveRecordingSessions = final int numActiveRecordingSessions =
logRecordingConfigurations(false /* verifyAudioConfig */); logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
if (numActiveRecordingSessions != 0) { if (numActiveRecordingSessions != 0) {
// Log the conflict as a warning since initialization did in fact succeed. Most likely, the // Log the conflict as a warning since initialization did in fact succeed. Most likely, the
// upcoming call to startRecording() will fail under these conditions. // upcoming call to startRecording() will fail under these conditions.
@ -371,7 +379,7 @@ class WebRtcAudioRecord {
} }
audioThread = new AudioRecordThread("AudioRecordJavaThread"); audioThread = new AudioRecordThread("AudioRecordJavaThread");
audioThread.start(); audioThread.start();
scheduleLogRecordingConfigurationsTask(); scheduleLogRecordingConfigurationsTask(audioRecord);
return true; return true;
} }
@ -386,10 +394,6 @@ class WebRtcAudioRecord {
} }
future = null; future = null;
} }
if (executor != null) {
executor.shutdownNow();
executor = null;
}
audioThread.stopThread(); audioThread.stopThread();
if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
@ -442,8 +446,8 @@ class WebRtcAudioRecord {
@TargetApi(Build.VERSION_CODES.N) @TargetApi(Build.VERSION_CODES.N)
// Checks the number of active recording sessions and logs the states of all active sessions. // Checks the number of active recording sessions and logs the states of all active sessions.
// Returns number of active sessions. // Returns number of active sessions. Note that this could occur on an arbitrary thread.
private int logRecordingConfigurations(boolean verifyAudioConfig) { private int logRecordingConfigurations(AudioRecord audioRecord, boolean verifyAudioConfig) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
Logging.w(TAG, "AudioManager#getActiveRecordingConfigurations() requires N or higher"); Logging.w(TAG, "AudioManager#getActiveRecordingConfigurations() requires N or higher");
return 0; return 0;
@ -451,6 +455,7 @@ class WebRtcAudioRecord {
if (audioRecord == null) { if (audioRecord == null) {
return 0; return 0;
} }
// Get a list of the currently active audio recording configurations of the device (can be more // Get a list of the currently active audio recording configurations of the device (can be more
// than one). An empty list indicates there is no recording active when queried. // than one). An empty list indicates there is no recording active when queried.
List<AudioRecordingConfiguration> configs = audioManager.getActiveRecordingConfigurations(); List<AudioRecordingConfiguration> configs = audioManager.getActiveRecordingConfigurations();
@ -463,10 +468,9 @@ class WebRtcAudioRecord {
// to the AudioRecord instance) is matching what the audio recording configuration lists // to the AudioRecord instance) is matching what the audio recording configuration lists
// as its client parameters. If these do not match, recording might work but under invalid // as its client parameters. If these do not match, recording might work but under invalid
// conditions. // conditions.
audioSourceMatchesRecordingSession = audioSourceMatchesRecordingSessionRef.set(
verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(), verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(),
audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs); audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs));
isAudioConfigVerified = true;
} }
} }
return numActiveRecordingSessions; return numActiveRecordingSessions;
@ -501,12 +505,13 @@ class WebRtcAudioRecord {
audioRecord.release(); audioRecord.release();
audioRecord = null; audioRecord = null;
} }
audioSourceMatchesRecordingSessionRef.set(null);
} }
private void reportWebRtcAudioRecordInitError(String errorMessage) { private void reportWebRtcAudioRecordInitError(String errorMessage) {
Logging.e(TAG, "Init recording error: " + errorMessage); Logging.e(TAG, "Init recording error: " + errorMessage);
WebRtcAudioUtils.logAudioState(TAG, context, audioManager); WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
logRecordingConfigurations(false /* verifyAudioConfig */); logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
if (errorCallback != null) { if (errorCallback != null) {
errorCallback.onWebRtcAudioRecordInitError(errorMessage); errorCallback.onWebRtcAudioRecordInitError(errorMessage);
} }
@ -516,7 +521,7 @@ class WebRtcAudioRecord {
AudioRecordStartErrorCode errorCode, String errorMessage) { AudioRecordStartErrorCode errorCode, String errorMessage) {
Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage); Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage);
WebRtcAudioUtils.logAudioState(TAG, context, audioManager); WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
logRecordingConfigurations(false /* verifyAudioConfig */); logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
if (errorCallback != null) { if (errorCallback != null) {
errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage); errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage);
} }
@ -564,18 +569,18 @@ class WebRtcAudioRecord {
// Use an ExecutorService to schedule a task after a given delay where the task consists of // Use an ExecutorService to schedule a task after a given delay where the task consists of
// checking (by logging) the current status of active recording sessions. // checking (by logging) the current status of active recording sessions.
private void scheduleLogRecordingConfigurationsTask() { private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) {
Logging.d(TAG, "scheduleLogRecordingConfigurationsTask"); Logging.d(TAG, "scheduleLogRecordingConfigurationsTask");
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
return; return;
} }
if (executor != null) {
executor.shutdownNow();
}
executor = Executors.newSingleThreadScheduledExecutor();
Callable<String> callable = () -> { Callable<String> callable = () -> {
logRecordingConfigurations(true /* verifyAudioConfig */); if (this.audioRecord == audioRecord) {
logRecordingConfigurations(audioRecord, true /* verifyAudioConfig */);
} else {
Logging.d(TAG, "audio record has changed");
}
return "Scheduled task is done"; return "Scheduled task is done";
}; };
@ -704,4 +709,22 @@ class WebRtcAudioRecord {
return "INVALID"; return "INVALID";
} }
} }
private static final AtomicInteger nextSchedulerId = new AtomicInteger(0);
static ScheduledExecutorService newDefaultScheduler() {
AtomicInteger nextThreadId = new AtomicInteger(0);
return Executors.newScheduledThreadPool(0, new ThreadFactory() {
/**
* Constructs a new {@code Thread}
*/
@Override
public Thread newThread(Runnable r) {
Thread thread = Executors.defaultThreadFactory().newThread(r);
thread.setName(String.format("WebRtcAudioRecordScheduler-%s-%s",
nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement()));
return thread;
}
});
}
} }