Reland of Improves native Android audio implementations

Second attempt to land https://webrtc-review.googlesource.com/c/src/+/15481.
This time with an extra (dummy) interface to ensure that we don't
break downstream clients.

Improves native Android audio implementations.

Bug: webrtc:8453
Change-Id: I659a3013ae523a2588e4c41ca44b7d0d2d65efb7
Reviewed-on: https://webrtc-review.googlesource.com/16425
Commit-Queue: Henrik Andreassson <henrika@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20462}
This commit is contained in:
henrika
2017-10-27 11:25:25 +02:00
committed by Commit Bot
parent e4c6915b87
commit b0576ecc71
3 changed files with 116 additions and 50 deletions

View File

@ -61,7 +61,7 @@ import org.webrtc.voiceengine.WebRtcAudioRecord;
import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode; import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode;
import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback; import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback;
import org.webrtc.voiceengine.WebRtcAudioTrack; import org.webrtc.voiceengine.WebRtcAudioTrack;
import org.webrtc.voiceengine.WebRtcAudioTrack.WebRtcAudioTrackErrorCallback; import org.webrtc.voiceengine.WebRtcAudioTrack.AudioTrackStartErrorCode;
import org.webrtc.voiceengine.WebRtcAudioUtils; import org.webrtc.voiceengine.WebRtcAudioUtils;
/** /**
@ -492,19 +492,23 @@ public class PeerConnectionClient {
} }
}); });
WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrackErrorCallback() { WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrack.ErrorCallback() {
@Override @Override
public void onWebRtcAudioTrackInitError(String errorMessage) { public void onWebRtcAudioTrackInitError(String errorMessage) {
Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
reportError(errorMessage); reportError(errorMessage);
} }
@Override @Override
public void onWebRtcAudioTrackStartError(String errorMessage) { public void onWebRtcAudioTrackStartError(
AudioTrackStartErrorCode errorCode, String errorMessage) {
Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
reportError(errorMessage); reportError(errorMessage);
} }
@Override @Override
public void onWebRtcAudioTrackError(String errorMessage) { public void onWebRtcAudioTrackError(String errorMessage) {
Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
reportError(errorMessage); reportError(errorMessage);
} }
}); });

View File

@ -128,16 +128,20 @@ public class WebRtcAudioRecord {
} }
} }
try { // Stops recording audio data.
if (audioRecord != null) { if (audioRecord != null) {
Logging.d(TAG, "Calling AudioRecord.stop...");
try {
audioRecord.stop(); audioRecord.stop();
} Logging.d(TAG, "AudioRecord.stop is done.");
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage()); Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
} }
} }
// Stops the inner thread loop and also calls AudioRecord.stop(). }
// Stops the inner thread loop which results in calling AudioRecord.stop().
// Does not block the calling thread. // Does not block the calling thread.
public void stopThread() { public void stopThread() {
Logging.d(TAG, "stopThread"); Logging.d(TAG, "stopThread");
@ -255,6 +259,7 @@ public class WebRtcAudioRecord {
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
"AudioRecord.startRecording failed: " + e.getMessage()); "AudioRecord.startRecording failed: " + e.getMessage());
releaseAudioResources();
return false; return false;
} }
@ -270,6 +275,7 @@ public class WebRtcAudioRecord {
AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
"AudioRecord.startRecording failed - incorrect state :" "AudioRecord.startRecording failed - incorrect state :"
+ audioRecord.getRecordingState()); + audioRecord.getRecordingState());
releaseAudioResources();
return false; return false;
} }
@ -286,9 +292,13 @@ public class WebRtcAudioRecord {
Logging.d(TAG, "stopRecording"); Logging.d(TAG, "stopRecording");
assertTrue(audioThread != null); assertTrue(audioThread != null);
audioThread.stopThread(); audioThread.stopThread();
Logging.d(TAG, "Stopping the AudioRecordThread...");
audioThread.interrupt();
if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); Logging.e(TAG, "Join of AudioRecordThread timed out.");
} }
Logging.d(TAG, "AudioRecordThread has now been stopped.");
audioThread = null; audioThread = null;
if (effects != null) { if (effects != null) {
effects.release(); effects.release();
@ -353,6 +363,7 @@ public class WebRtcAudioRecord {
// Releases the native AudioRecord resources. // Releases the native AudioRecord resources.
private void releaseAudioResources() { private void releaseAudioResources() {
Logging.d(TAG, "releaseAudioResources");
if (audioRecord != null) { if (audioRecord != null) {
audioRecord.release(); audioRecord.release();
audioRecord = null; audioRecord = null;

View File

@ -72,6 +72,7 @@ public class WebRtcAudioTrack {
private final long nativeAudioTrack; private final long nativeAudioTrack;
private final AudioManager audioManager; private final AudioManager audioManager;
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
private ByteBuffer byteBuffer; private ByteBuffer byteBuffer;
@ -83,16 +84,37 @@ public class WebRtcAudioTrack {
private static volatile boolean speakerMute = false; private static volatile boolean speakerMute = false;
private byte[] emptyBytes; private byte[] emptyBytes;
// Audio playout/track error handler functions.
public enum AudioTrackStartErrorCode {
AUDIO_TRACK_START_EXCEPTION,
AUDIO_TRACK_START_STATE_MISMATCH,
}
@Deprecated
public static interface WebRtcAudioTrackErrorCallback { public static interface WebRtcAudioTrackErrorCallback {
void onWebRtcAudioTrackInitError(String errorMessage); void onWebRtcAudioTrackInitError(String errorMessage);
void onWebRtcAudioTrackStartError(String errorMessage); void onWebRtcAudioTrackStartError(String errorMessage);
void onWebRtcAudioTrackError(String errorMessage); void onWebRtcAudioTrackError(String errorMessage);
} }
private static WebRtcAudioTrackErrorCallback errorCallback = null; // TODO(henrika): upgrade all clients to use this new interface instead.
public static interface ErrorCallback {
void onWebRtcAudioTrackInitError(String errorMessage);
void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage);
void onWebRtcAudioTrackError(String errorMessage);
}
private static WebRtcAudioTrackErrorCallback errorCallbackOld = null;
private static ErrorCallback errorCallback = null;
@Deprecated
public static void setErrorCallback(WebRtcAudioTrackErrorCallback errorCallback) { public static void setErrorCallback(WebRtcAudioTrackErrorCallback errorCallback) {
Logging.d(TAG, "Set error callback"); Logging.d(TAG, "Set error callback (deprecated)");
WebRtcAudioTrack.errorCallbackOld = errorCallback;
}
public static void setErrorCallback(ErrorCallback errorCallback) {
Logging.d(TAG, "Set extended error callback");
WebRtcAudioTrack.errorCallback = errorCallback; WebRtcAudioTrack.errorCallback = errorCallback;
} }
@ -113,26 +135,7 @@ public class WebRtcAudioTrack {
public void run() { public void run() {
Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo()); Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
try {
// In MODE_STREAM mode we can optionally prime the output buffer by
// writing up to bufferSizeInBytes (from constructor) before starting.
// This priming will avoid an immediate underrun, but is not required.
// TODO(henrika): initial tests have shown that priming is not required.
audioTrack.play();
} catch (IllegalStateException e) {
reportWebRtcAudioTrackStartError("AudioTrack.play failed: " + e.getMessage());
releaseAudioResources();
return;
}
// We have seen reports that AudioTrack.play() can sometimes start in a
// paued mode (e.g. when application is in background mode).
// TODO(henrika): consider calling reportWebRtcAudioTrackStartError()
// and release audio resources here as well. For now, let the thread start
// and hope that the audio session can be restored later.
if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
Logging.w(TAG, "AudioTrack failed to enter playing state.");
}
// Fixed size in bytes of each 10ms block of audio data that we ask for // Fixed size in bytes of each 10ms block of audio data that we ask for
// using callbacks to the native WebRTC client. // using callbacks to the native WebRTC client.
@ -181,10 +184,10 @@ public class WebRtcAudioTrack {
// MODE_STREAM mode, audio will stop playing after the last buffer that // MODE_STREAM mode, audio will stop playing after the last buffer that
// was written has been played. // was written has been played.
if (audioTrack != null) { if (audioTrack != null) {
Logging.d(TAG, "Stopping the audio track..."); Logging.d(TAG, "Calling AudioTrack.stop...");
try { try {
audioTrack.stop(); audioTrack.stop();
Logging.d(TAG, "The audio track has now been stopped."); Logging.d(TAG, "AudioTrack.stop is done.");
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage()); Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
} }
@ -200,7 +203,7 @@ public class WebRtcAudioTrack {
return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes); return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
} }
// Stops the inner thread loop and also calls AudioTrack.pause() and flush(). // Stops the inner thread loop which results in calling AudioTrack.stop().
// Does not block the calling thread. // Does not block the calling thread.
public void stopThread() { public void stopThread() {
Logging.d(TAG, "stopThread"); Logging.d(TAG, "stopThread");
@ -209,6 +212,7 @@ public class WebRtcAudioTrack {
} }
WebRtcAudioTrack(long nativeAudioTrack) { WebRtcAudioTrack(long nativeAudioTrack) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
this.nativeAudioTrack = nativeAudioTrack; this.nativeAudioTrack = nativeAudioTrack;
audioManager = audioManager =
@ -219,6 +223,7 @@ public class WebRtcAudioTrack {
} }
private boolean initPlayout(int sampleRate, int channels) { private boolean initPlayout(int sampleRate, int channels) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")"); Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8); final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND)); byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
@ -290,41 +295,66 @@ public class WebRtcAudioTrack {
} }
private boolean startPlayout() { private boolean startPlayout() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "startPlayout"); Logging.d(TAG, "startPlayout");
assertTrue(audioTrack != null); assertTrue(audioTrack != null);
assertTrue(audioThread == null); assertTrue(audioThread == null);
if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
reportWebRtcAudioTrackStartError("AudioTrack instance is not successfully initialized."); // Starts playing an audio track.
try {
audioTrack.play();
} catch (IllegalStateException e) {
reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
"AudioTrack.play failed: " + e.getMessage());
releaseAudioResources();
return false; return false;
} }
// Verify the playout state up to two times (with a sleep in between)
// before returning false and reporting an error.
int numberOfStateChecks = 0;
while (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING &&
++numberOfStateChecks < 2) {
threadSleep(200);
}
if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
reportWebRtcAudioTrackStartError(
AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
"AudioTrack.play failed - incorrect state :"
+ audioTrack.getPlayState());
releaseAudioResources();
return false;
}
// Create and start new high-priority thread which calls AudioTrack.write()
// and where we also call the native nativeGetPlayoutData() callback to
// request decoded audio from WebRTC.
audioThread = new AudioTrackThread("AudioTrackJavaThread"); audioThread = new AudioTrackThread("AudioTrackJavaThread");
audioThread.start(); audioThread.start();
return true; return true;
} }
private boolean stopPlayout() { private boolean stopPlayout() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "stopPlayout"); Logging.d(TAG, "stopPlayout");
assertTrue(audioThread != null); assertTrue(audioThread != null);
logUnderrunCount(); logUnderrunCount();
audioThread.stopThread(); audioThread.stopThread();
final Thread aThread = audioThread;
audioThread = null;
if (aThread != null) {
Logging.d(TAG, "Stopping the AudioTrackThread..."); Logging.d(TAG, "Stopping the AudioTrackThread...");
aThread.interrupt(); audioThread.interrupt();
if (!ThreadUtils.joinUninterruptibly(aThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) { if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
Logging.e(TAG, "Join of AudioTrackThread timed out."); Logging.e(TAG, "Join of AudioTrackThread timed out.");
} }
Logging.d(TAG, "AudioTrackThread has now been stopped."); Logging.d(TAG, "AudioTrackThread has now been stopped.");
} audioThread = null;
releaseAudioResources(); releaseAudioResources();
return true; return true;
} }
// Get max possible volume index for a phone call audio stream. // Get max possible volume index for a phone call audio stream.
private int getStreamMaxVolume() { private int getStreamMaxVolume() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "getStreamMaxVolume"); Logging.d(TAG, "getStreamMaxVolume");
assertTrue(audioManager != null); assertTrue(audioManager != null);
return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL); return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
@ -332,6 +362,7 @@ public class WebRtcAudioTrack {
// Set current volume level for a phone call audio stream. // Set current volume level for a phone call audio stream.
private boolean setStreamVolume(int volume) { private boolean setStreamVolume(int volume) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "setStreamVolume(" + volume + ")"); Logging.d(TAG, "setStreamVolume(" + volume + ")");
assertTrue(audioManager != null); assertTrue(audioManager != null);
if (isVolumeFixed()) { if (isVolumeFixed()) {
@ -350,6 +381,7 @@ public class WebRtcAudioTrack {
/** Get current volume level for a phone call audio stream. */ /** Get current volume level for a phone call audio stream. */
private int getStreamVolume() { private int getStreamVolume() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "getStreamVolume"); Logging.d(TAG, "getStreamVolume");
assertTrue(audioManager != null); assertTrue(audioManager != null);
return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL); return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
@ -464,24 +496,43 @@ public class WebRtcAudioTrack {
} }
private void reportWebRtcAudioTrackInitError(String errorMessage) { private void reportWebRtcAudioTrackInitError(String errorMessage) {
Logging.e(TAG, "Init error: " + errorMessage); Logging.e(TAG, "Init playout error: " + errorMessage);
if (errorCallbackOld != null) {
errorCallbackOld.onWebRtcAudioTrackInitError(errorMessage);
}
if (errorCallback != null) { if (errorCallback != null) {
errorCallback.onWebRtcAudioTrackInitError(errorMessage); errorCallback.onWebRtcAudioTrackInitError(errorMessage);
} }
} }
private void reportWebRtcAudioTrackStartError(String errorMessage) { private void reportWebRtcAudioTrackStartError(
Logging.e(TAG, "Start error: " + errorMessage); AudioTrackStartErrorCode errorCode, String errorMessage) {
Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage);
if (errorCallback != null) { if (errorCallbackOld != null) {
errorCallback.onWebRtcAudioTrackStartError(errorMessage); errorCallbackOld.onWebRtcAudioTrackStartError(errorMessage);
}
if (errorCallback != null) {
errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage);
} }
} }
private void reportWebRtcAudioTrackError(String errorMessage) { private void reportWebRtcAudioTrackError(String errorMessage) {
Logging.e(TAG, "Run-time playback error: " + errorMessage); Logging.e(TAG, "Run-time playback error: " + errorMessage);
if (errorCallbackOld != null) {
errorCallbackOld.onWebRtcAudioTrackError(errorMessage);
}
if (errorCallback != null) { if (errorCallback != null) {
errorCallback.onWebRtcAudioTrackError(errorMessage); errorCallback.onWebRtcAudioTrackError(errorMessage);
} }
} }
// Causes the currently executing thread to sleep for the specified number
// of milliseconds.
private void threadSleep(long millis) {
try {
Thread.sleep(millis);
} catch (InterruptedException e) {
Logging.e(TAG, "Thread.sleep failed: " + e.getMessage());
}
}
} }