Reland of "Improves native Android audio implementations"
Second attempt to land https://webrtc-review.googlesource.com/c/src/+/15481. This time with an extra (dummy) interface to ensure that we don't break downstream clients.

Improves native Android audio implementations.

Bug: webrtc:8453
Change-Id: I659a3013ae523a2588e4c41ca44b7d0d2d65efb7
Reviewed-on: https://webrtc-review.googlesource.com/16425
Commit-Queue: Henrik Andreasson <henrika@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20462}
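For downstream clients, adopting the new error callback added in this change looks roughly like the sketch below. The WebRtcAudioTrack.ErrorCallback interface and the AudioTrackStartErrorCode enum are taken from the diff that follows; the implementing class, log tag, and error handling are illustrative placeholders only.

import org.webrtc.voiceengine.WebRtcAudioTrack;
import org.webrtc.voiceengine.WebRtcAudioTrack.AudioTrackStartErrorCode;

// Hypothetical client-side handler; only the ErrorCallback contract comes from this change.
class AudioTrackErrorLogger implements WebRtcAudioTrack.ErrorCallback {
  private static final String TAG = "AudioTrackErrorLogger"; // placeholder tag

  @Override
  public void onWebRtcAudioTrackInitError(String errorMessage) {
    android.util.Log.e(TAG, "Init error: " + errorMessage);
  }

  @Override
  public void onWebRtcAudioTrackStartError(
      AudioTrackStartErrorCode errorCode, String errorMessage) {
    // The error code distinguishes an exception thrown by AudioTrack.play() from a
    // playout-state mismatch detected after play() returned.
    android.util.Log.e(TAG, "Start error: " + errorCode + ". " + errorMessage);
  }

  @Override
  public void onWebRtcAudioTrackError(String errorMessage) {
    android.util.Log.e(TAG, "Run-time error: " + errorMessage);
  }
}

Registration then goes through the new overload, WebRtcAudioTrack.setErrorCallback(new AudioTrackErrorLogger()), rather than the deprecated overload that takes a WebRtcAudioTrackErrorCallback.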
@@ -61,7 +61,7 @@ import org.webrtc.voiceengine.WebRtcAudioRecord;
import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode;
import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback;
import org.webrtc.voiceengine.WebRtcAudioTrack;
import org.webrtc.voiceengine.WebRtcAudioTrack.WebRtcAudioTrackErrorCallback;
import org.webrtc.voiceengine.WebRtcAudioTrack.AudioTrackStartErrorCode;
import org.webrtc.voiceengine.WebRtcAudioUtils;

/**

@@ -492,19 +492,23 @@ public class PeerConnectionClient {
      }
    });

    WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrackErrorCallback() {
    WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrack.ErrorCallback() {
      @Override
      public void onWebRtcAudioTrackInitError(String errorMessage) {
        Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
        reportError(errorMessage);
      }

      @Override
      public void onWebRtcAudioTrackStartError(String errorMessage) {
      public void onWebRtcAudioTrackStartError(
          AudioTrackStartErrorCode errorCode, String errorMessage) {
        Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
        reportError(errorMessage);
      }

      @Override
      public void onWebRtcAudioTrackError(String errorMessage) {
        Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
        reportError(errorMessage);
      }
    });

@@ -128,16 +128,20 @@ public class WebRtcAudioRecord {
        }
      }

      try {
      // Stops recording audio data.
      if (audioRecord != null) {
        Logging.d(TAG, "Calling AudioRecord.stop...");
        try {
          audioRecord.stop();
      }
          Logging.d(TAG, "AudioRecord.stop is done.");
        } catch (IllegalStateException e) {
          Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
        }
      }

    // Stops the inner thread loop and also calls AudioRecord.stop().
    }

    // Stops the inner thread loop which results in calling AudioRecord.stop().
    // Does not block the calling thread.
    public void stopThread() {
      Logging.d(TAG, "stopThread");

@@ -255,6 +259,7 @@ public class WebRtcAudioRecord {
    } catch (IllegalStateException e) {
      reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
          "AudioRecord.startRecording failed: " + e.getMessage());
      releaseAudioResources();
      return false;
    }

@@ -270,6 +275,7 @@ public class WebRtcAudioRecord {
          AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
          "AudioRecord.startRecording failed - incorrect state :"
              + audioRecord.getRecordingState());
      releaseAudioResources();
      return false;
    }

@@ -286,9 +292,13 @@ public class WebRtcAudioRecord {
    Logging.d(TAG, "stopRecording");
    assertTrue(audioThread != null);
    audioThread.stopThread();

    Logging.d(TAG, "Stopping the AudioRecordThread...");
    audioThread.interrupt();
    if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
      Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
      Logging.e(TAG, "Join of AudioRecordThread timed out.");
    }
    Logging.d(TAG, "AudioRecordThread has now been stopped.");
    audioThread = null;
    if (effects != null) {
      effects.release();

@@ -353,6 +363,7 @@ public class WebRtcAudioRecord {

  // Releases the native AudioRecord resources.
  private void releaseAudioResources() {
    Logging.d(TAG, "releaseAudioResources");
    if (audioRecord != null) {
      audioRecord.release();
      audioRecord = null;

@@ -72,6 +72,7 @@ public class WebRtcAudioTrack {

  private final long nativeAudioTrack;
  private final AudioManager audioManager;
  private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();

  private ByteBuffer byteBuffer;

@@ -83,16 +84,37 @@ public class WebRtcAudioTrack {
  private static volatile boolean speakerMute = false;
  private byte[] emptyBytes;

  // Audio playout/track error handler functions.
  public enum AudioTrackStartErrorCode {
    AUDIO_TRACK_START_EXCEPTION,
    AUDIO_TRACK_START_STATE_MISMATCH,
  }

  @Deprecated
  public static interface WebRtcAudioTrackErrorCallback {
    void onWebRtcAudioTrackInitError(String errorMessage);
    void onWebRtcAudioTrackStartError(String errorMessage);
    void onWebRtcAudioTrackError(String errorMessage);
  }

  private static WebRtcAudioTrackErrorCallback errorCallback = null;
  // TODO(henrika): upgrade all clients to use this new interface instead.
  public static interface ErrorCallback {
    void onWebRtcAudioTrackInitError(String errorMessage);
    void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage);
    void onWebRtcAudioTrackError(String errorMessage);
  }

  private static WebRtcAudioTrackErrorCallback errorCallbackOld = null;
  private static ErrorCallback errorCallback = null;

  @Deprecated
  public static void setErrorCallback(WebRtcAudioTrackErrorCallback errorCallback) {
    Logging.d(TAG, "Set error callback");
    Logging.d(TAG, "Set error callback (deprecated)");
    WebRtcAudioTrack.errorCallbackOld = errorCallback;
  }

  public static void setErrorCallback(ErrorCallback errorCallback) {
    Logging.d(TAG, "Set extended error callback");
    WebRtcAudioTrack.errorCallback = errorCallback;
  }

@@ -113,26 +135,7 @@ public class WebRtcAudioTrack {
    public void run() {
      Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
      Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());

      try {
        // In MODE_STREAM mode we can optionally prime the output buffer by
        // writing up to bufferSizeInBytes (from constructor) before starting.
        // This priming will avoid an immediate underrun, but is not required.
        // TODO(henrika): initial tests have shown that priming is not required.
        audioTrack.play();
      } catch (IllegalStateException e) {
        reportWebRtcAudioTrackStartError("AudioTrack.play failed: " + e.getMessage());
        releaseAudioResources();
        return;
      }
      // We have seen reports that AudioTrack.play() can sometimes start in a
      // paused mode (e.g. when application is in background mode).
      // TODO(henrika): consider calling reportWebRtcAudioTrackStartError()
      // and release audio resources here as well. For now, let the thread start
      // and hope that the audio session can be restored later.
      if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
        Logging.w(TAG, "AudioTrack failed to enter playing state.");
      }
      assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);

      // Fixed size in bytes of each 10ms block of audio data that we ask for
      // using callbacks to the native WebRTC client.

@@ -181,10 +184,10 @@ public class WebRtcAudioTrack {
      // MODE_STREAM mode, audio will stop playing after the last buffer that
      // was written has been played.
      if (audioTrack != null) {
        Logging.d(TAG, "Stopping the audio track...");
        Logging.d(TAG, "Calling AudioTrack.stop...");
        try {
          audioTrack.stop();
          Logging.d(TAG, "The audio track has now been stopped.");
          Logging.d(TAG, "AudioTrack.stop is done.");
        } catch (IllegalStateException e) {
          Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
        }

@@ -200,7 +203,7 @@ public class WebRtcAudioTrack {
      return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
    }

    // Stops the inner thread loop and also calls AudioTrack.pause() and flush().
    // Stops the inner thread loop which results in calling AudioTrack.stop().
    // Does not block the calling thread.
    public void stopThread() {
      Logging.d(TAG, "stopThread");

@@ -209,6 +212,7 @@ public class WebRtcAudioTrack {
  }

  WebRtcAudioTrack(long nativeAudioTrack) {
    threadChecker.checkIsOnValidThread();
    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
    this.nativeAudioTrack = nativeAudioTrack;
    audioManager =

@@ -219,6 +223,7 @@ public class WebRtcAudioTrack {
  }

  private boolean initPlayout(int sampleRate, int channels) {
    threadChecker.checkIsOnValidThread();
    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
    final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
    byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));

@@ -290,41 +295,66 @@ public class WebRtcAudioTrack {
  }

  private boolean startPlayout() {
    threadChecker.checkIsOnValidThread();
    Logging.d(TAG, "startPlayout");
    assertTrue(audioTrack != null);
    assertTrue(audioThread == null);
    if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
      reportWebRtcAudioTrackStartError("AudioTrack instance is not successfully initialized.");

    // Starts playing an audio track.
    try {
      audioTrack.play();
    } catch (IllegalStateException e) {
      reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
          "AudioTrack.play failed: " + e.getMessage());
      releaseAudioResources();
      return false;
    }

    // Verify the playout state up to two times (with a sleep in between)
    // before returning false and reporting an error.
    int numberOfStateChecks = 0;
    while (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING &&
        ++numberOfStateChecks < 2) {
      threadSleep(200);
    }
    if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
      reportWebRtcAudioTrackStartError(
          AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
          "AudioTrack.play failed - incorrect state :"
              + audioTrack.getPlayState());
      releaseAudioResources();
      return false;
    }

    // Create and start new high-priority thread which calls AudioTrack.write()
    // and where we also call the native nativeGetPlayoutData() callback to
    // request decoded audio from WebRTC.
    audioThread = new AudioTrackThread("AudioTrackJavaThread");
    audioThread.start();
    return true;
  }

  private boolean stopPlayout() {
    threadChecker.checkIsOnValidThread();
    Logging.d(TAG, "stopPlayout");
    assertTrue(audioThread != null);
    logUnderrunCount();
    audioThread.stopThread();

    final Thread aThread = audioThread;
    audioThread = null;
    if (aThread != null) {
      Logging.d(TAG, "Stopping the AudioTrackThread...");
      aThread.interrupt();
      if (!ThreadUtils.joinUninterruptibly(aThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
    audioThread.interrupt();
    if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
      Logging.e(TAG, "Join of AudioTrackThread timed out.");
    }
    Logging.d(TAG, "AudioTrackThread has now been stopped.");
    }

    audioThread = null;
    releaseAudioResources();
    return true;
  }

  // Get max possible volume index for a phone call audio stream.
  private int getStreamMaxVolume() {
    threadChecker.checkIsOnValidThread();
    Logging.d(TAG, "getStreamMaxVolume");
    assertTrue(audioManager != null);
    return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);

@@ -332,6 +362,7 @@ public class WebRtcAudioTrack {

  // Set current volume level for a phone call audio stream.
  private boolean setStreamVolume(int volume) {
    threadChecker.checkIsOnValidThread();
    Logging.d(TAG, "setStreamVolume(" + volume + ")");
    assertTrue(audioManager != null);
    if (isVolumeFixed()) {

@@ -350,6 +381,7 @@ public class WebRtcAudioTrack {

  /** Get current volume level for a phone call audio stream. */
  private int getStreamVolume() {
    threadChecker.checkIsOnValidThread();
    Logging.d(TAG, "getStreamVolume");
    assertTrue(audioManager != null);
    return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);

@@ -464,24 +496,43 @@ public class WebRtcAudioTrack {
  }

  private void reportWebRtcAudioTrackInitError(String errorMessage) {
    Logging.e(TAG, "Init error: " + errorMessage);
    Logging.e(TAG, "Init playout error: " + errorMessage);
    if (errorCallback != null) {
      errorCallbackOld.onWebRtcAudioTrackInitError(errorMessage);
    }
    if (errorCallback != null) {
      errorCallback.onWebRtcAudioTrackInitError(errorMessage);
    }
  }

  private void reportWebRtcAudioTrackStartError(String errorMessage) {
    Logging.e(TAG, "Start error: " + errorMessage);
  private void reportWebRtcAudioTrackStartError(
      AudioTrackStartErrorCode errorCode, String errorMessage) {
    Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage);
    if (errorCallback != null) {
      errorCallback.onWebRtcAudioTrackStartError(errorMessage);
      errorCallbackOld.onWebRtcAudioTrackStartError(errorMessage);
    }
    if (errorCallback != null) {
      errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage);
    }
  }

  private void reportWebRtcAudioTrackError(String errorMessage) {
    Logging.e(TAG, "Run-time playback error: " + errorMessage);
    if (errorCallback != null) {
      errorCallbackOld.onWebRtcAudioTrackError(errorMessage);
    }
    if (errorCallback != null) {
      errorCallback.onWebRtcAudioTrackError(errorMessage);
    }
  }

  // Causes the currently executing thread to sleep for the specified number
  // of milliseconds.
  private void threadSleep(long millis) {
    try {
      Thread.sleep(millis);
    } catch (InterruptedException e) {
      Logging.e(TAG, "Thread.sleep failed: " + e.getMessage());
    }
  }
}