Adds setAudio[Track/Record]StateCallback interfaces to the Java ADM

Bug: webrtc:10950
Change-Id: Ifa7bd7eb003bf97812ce0dfa5a0192ee8955419c
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/151648
Reviewed-by: Alex Glaznev <glaznev@webrtc.org>
Commit-Queue: Henrik Andreasson <henrika@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#29107}
henrika
2019-09-06 13:45:11 +02:00
committed by Commit Bot
parent 81a08a7feb
commit 4d6b2691bd
4 changed files with 121 additions and 7 deletions
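For reference, a minimal sketch (not part of this change) of how an application can register the new state callbacks when building the Java audio device module. The AdmFactory class name, TAG constant, and appContext parameter are illustrative placeholders; the callback bodies simply log, mirroring the PeerConnectionClient changes below.

import android.content.Context;
import android.util.Log;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;

class AdmFactory {
  private static final String TAG = "AdmFactory";

  static AudioDeviceModule createAudioDeviceModule(Context appContext) {
    // Informed when the underlying AudioRecord starts and stops capturing.
    AudioRecordStateCallback recordStateCallback = new AudioRecordStateCallback() {
      @Override
      public void onWebRtcAudioRecordStart() {
        Log.i(TAG, "Audio recording starts");
      }

      @Override
      public void onWebRtcAudioRecordStop() {
        Log.i(TAG, "Audio recording stops");
      }
    };

    // Informed when the underlying AudioTrack starts and stops playing out audio.
    AudioTrackStateCallback trackStateCallback = new AudioTrackStateCallback() {
      @Override
      public void onWebRtcAudioTrackStart() {
        Log.i(TAG, "Audio playout starts");
      }

      @Override
      public void onWebRtcAudioTrackStop() {
        Log.i(TAG, "Audio playout stops");
      }
    };

    return JavaAudioDeviceModule.builder(appContext)
        .setAudioRecordStateCallback(recordStateCallback)
        .setAudioTrackStateCallback(trackStateCallback)
        .createAudioDeviceModule();
  }
}

As the WebRtcAudioRecord and WebRtcAudioTrack diffs below show, the callbacks are invoked from the internal audio threads, so implementations should return quickly and avoid blocking.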

View File

@@ -75,7 +75,9 @@ import org.webrtc.VideoTrack;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;
/**
* Peer connection client implementation.
@@ -501,12 +503,40 @@ public class PeerConnectionClient {
}
};
// Set audio record state callbacks.
AudioRecordStateCallback audioRecordStateCallback = new AudioRecordStateCallback() {
@Override
public void onWebRtcAudioRecordStart() {
Log.i(TAG, "Audio recording starts");
}
@Override
public void onWebRtcAudioRecordStop() {
Log.i(TAG, "Audio recording stops");
}
};
// Set audio track state callbacks.
AudioTrackStateCallback audioTrackStateCallback = new AudioTrackStateCallback() {
@Override
public void onWebRtcAudioTrackStart() {
Log.i(TAG, "Audio playout starts");
}
@Override
public void onWebRtcAudioTrackStop() {
Log.i(TAG, "Audio playout stops");
}
};
return JavaAudioDeviceModule.builder(appContext)
.setSamplesReadyCallback(saveRecordedAudioToFile)
.setUseHardwareAcousticEchoCanceler(!peerConnectionParameters.disableBuiltInAEC)
.setUseHardwareNoiseSuppressor(!peerConnectionParameters.disableBuiltInNS)
.setAudioRecordErrorCallback(audioRecordErrorCallback)
.setAudioTrackErrorCallback(audioTrackErrorCallback)
.setAudioRecordStateCallback(audioRecordStateCallback)
.setAudioTrackStateCallback(audioTrackStateCallback)
.createAudioDeviceModule();
}

View File

@@ -36,6 +36,8 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
private AudioTrackErrorCallback audioTrackErrorCallback;
private AudioRecordErrorCallback audioRecordErrorCallback;
private SamplesReadyCallback samplesReadyCallback;
private AudioTrackStateCallback audioTrackStateCallback;
private AudioRecordStateCallback audioRecordStateCallback;
private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported();
private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported();
private boolean useStereoInput;
@@ -122,6 +124,22 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
return this;
}
/**
* Set a callback to retrieve information from the AudioTrack on when audio starts and stops.
*/
public Builder setAudioTrackStateCallback(AudioTrackStateCallback audioTrackStateCallback) {
this.audioTrackStateCallback = audioTrackStateCallback;
return this;
}
/**
* Set a callback to retrieve information from the AudioRecord on when audio starts and stops.
*/
public Builder setAudioRecordStateCallback(AudioRecordStateCallback audioRecordStateCallback) {
this.audioRecordStateCallback = audioRecordStateCallback;
return this;
}
/**
* Control if the built-in HW noise suppressor should be used or not. The default is on if it is
* supported. It is possible to query support by calling isBuiltInNoiseSuppressorSupported().
@@ -188,10 +206,10 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
Logging.d(TAG, "HW AEC will not be used.");
}
final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, audioManager, audioSource,
audioFormat, audioRecordErrorCallback, samplesReadyCallback,
audioFormat, audioRecordErrorCallback, audioRecordStateCallback, samplesReadyCallback,
useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
final WebRtcAudioTrack audioOutput =
new WebRtcAudioTrack(context, audioManager, audioTrackErrorCallback);
final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(
context, audioManager, audioTrackErrorCallback, audioTrackStateCallback);
return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
}
@@ -210,6 +228,12 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
void onWebRtcAudioRecordError(String errorMessage);
}
/** Called when audio recording starts and stops. */
public static interface AudioRecordStateCallback {
void onWebRtcAudioRecordStart();
void onWebRtcAudioRecordStop();
}
/**
* Contains audio sample information.
*/
@@ -265,6 +289,12 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
void onWebRtcAudioTrackError(String errorMessage);
}
/** Called when audio playout starts and stops. */
public static interface AudioTrackStateCallback {
void onWebRtcAudioTrackStart();
void onWebRtcAudioTrackStop();
}
/**
* Returns true if the device supports built-in HW AEC, and the UUID is approved (some UUIDs can
* be excluded).

View File

@@ -28,6 +28,7 @@ import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStartErrorCode;
import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
class WebRtcAudioRecord {
@@ -54,6 +55,12 @@ class WebRtcAudioRecord {
// Guaranteed to be supported by all devices.
public static final int DEFAULT_AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
// Indicates AudioRecord has started recording audio.
private static final int AUDIO_RECORD_START = 0;
// Indicates AudioRecord has stopped recording audio.
private static final int AUDIO_RECORD_STOP = 1;
private final Context context;
private final AudioManager audioManager;
private final int audioSource;
@@ -72,6 +79,7 @@ class WebRtcAudioRecord {
private byte[] emptyBytes;
private final @Nullable AudioRecordErrorCallback errorCallback;
private final @Nullable AudioRecordStateCallback stateCallback;
private final @Nullable SamplesReadyCallback audioSamplesReadyCallback;
private final boolean isAcousticEchoCancelerSupported;
private final boolean isNoiseSuppressorSupported;
@@ -95,6 +103,9 @@ class WebRtcAudioRecord {
Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo());
assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);
// Audio recording has started and the client is informed about it.
doAudioRecordStateCallback(AUDIO_RECORD_START);
long lastTime = System.nanoTime();
while (keepAlive) {
int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity());
@@ -131,6 +142,7 @@ class WebRtcAudioRecord {
try {
if (audioRecord != null) {
audioRecord.stop();
doAudioRecordStateCallback(AUDIO_RECORD_STOP);
}
} catch (IllegalStateException e) {
Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
@@ -148,13 +160,14 @@ class WebRtcAudioRecord {
@CalledByNative
WebRtcAudioRecord(Context context, AudioManager audioManager) {
this(context, audioManager, DEFAULT_AUDIO_SOURCE, DEFAULT_AUDIO_FORMAT,
null /* errorCallback */, null /* audioSamplesReadyCallback */,
null /* errorCallback */, null /* stateCallback */, null /* audioSamplesReadyCallback */,
WebRtcAudioEffects.isAcousticEchoCancelerSupported(),
WebRtcAudioEffects.isNoiseSuppressorSupported());
}
public WebRtcAudioRecord(Context context, AudioManager audioManager, int audioSource,
int audioFormat, @Nullable AudioRecordErrorCallback errorCallback,
@Nullable AudioRecordStateCallback stateCallback,
@Nullable SamplesReadyCallback audioSamplesReadyCallback,
boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) {
if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) {
@@ -168,6 +181,7 @@ class WebRtcAudioRecord {
this.audioSource = audioSource;
this.audioFormat = audioFormat;
this.errorCallback = errorCallback;
this.stateCallback = stateCallback;
this.audioSamplesReadyCallback = audioSamplesReadyCallback;
this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported;
this.isNoiseSuppressorSupported = isNoiseSuppressorSupported;
@@ -395,6 +409,19 @@ class WebRtcAudioRecord {
}
}
private void doAudioRecordStateCallback(int audioState) {
Logging.d(TAG, "doAudioRecordStateCallback: " + audioState);
if (stateCallback != null) {
if (audioState == WebRtcAudioRecord.AUDIO_RECORD_START) {
stateCallback.onWebRtcAudioRecordStart();
} else if (audioState == WebRtcAudioRecord.AUDIO_RECORD_STOP) {
stateCallback.onWebRtcAudioRecordStop();
} else {
Logging.e(TAG, "Invalid audio state");
}
}
}
// Reference from Android code, AudioFormat.getBytesPerSample. BitPerSample / 8
// Default audio data format is PCM 16 bits per sample.
// Guaranteed to be supported by all devices

View File

@@ -26,6 +26,7 @@ import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode;
import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;
class WebRtcAudioTrack {
private static final String TAG = "WebRtcAudioTrackExternal";
@@ -57,6 +58,12 @@ class WebRtcAudioTrack {
}
}
// Indicates the AudioTrack has started playing audio.
private static final int AUDIO_TRACK_START = 0;
// Indicates the AudioTrack has stopped playing audio.
private static final int AUDIO_TRACK_STOP = 1;
private long nativeAudioTrack;
private final Context context;
private final AudioManager audioManager;
@@ -74,6 +81,7 @@ class WebRtcAudioTrack {
private byte[] emptyBytes;
private final @Nullable AudioTrackErrorCallback errorCallback;
private final @Nullable AudioTrackStateCallback stateCallback;
/**
* Audio thread which keeps calling AudioTrack.write() to stream audio.
@@ -94,6 +102,9 @@ class WebRtcAudioTrack {
Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
// Audio playout has started and the client is informed about it.
doAudioTrackStateCallback(AUDIO_TRACK_START);
// Fixed size in bytes of each 10ms block of audio data that we ask for
// using callbacks to the native WebRTC client.
final int sizeInBytes = byteBuffer.capacity();
@@ -140,6 +151,7 @@ class WebRtcAudioTrack {
try {
audioTrack.stop();
Logging.d(TAG, "AudioTrack.stop is done.");
doAudioTrackStateCallback(AUDIO_TRACK_STOP);
} catch (IllegalStateException e) {
Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
}
@@ -164,15 +176,17 @@ class WebRtcAudioTrack {
@CalledByNative
WebRtcAudioTrack(Context context, AudioManager audioManager) {
this(context, audioManager, null /* errorCallback */);
this(context, audioManager, null /* errorCallback */, null /* stateCallback */);
}
WebRtcAudioTrack(
Context context, AudioManager audioManager, @Nullable AudioTrackErrorCallback errorCallback) {
WebRtcAudioTrack(Context context, AudioManager audioManager,
@Nullable AudioTrackErrorCallback errorCallback,
@Nullable AudioTrackStateCallback stateCallback) {
threadChecker.detachThread();
this.context = context;
this.audioManager = audioManager;
this.errorCallback = errorCallback;
this.stateCallback = stateCallback;
this.volumeLogger = new VolumeLogger(audioManager);
}
@@ -493,4 +507,17 @@ class WebRtcAudioTrack {
errorCallback.onWebRtcAudioTrackError(errorMessage);
}
}
private void doAudioTrackStateCallback(int audioState) {
Logging.d(TAG, "doAudioTrackStateCallback: " + audioState);
if (stateCallback != null) {
if (audioState == WebRtcAudioTrack.AUDIO_TRACK_START) {
stateCallback.onWebRtcAudioTrackStart();
} else if (audioState == WebRtcAudioTrack.AUDIO_TRACK_STOP) {
stateCallback.onWebRtcAudioTrackStop();
} else {
Logging.e(TAG, "Invalid audio state");
}
}
}
}