The signature of onWebRtcAudioTrackStartError was changed in this CL, which breaks internal projects that implement the callback.

Revert "Improves native Android audio implementations."

This reverts commit 92b1ffd0f655e88532cb7313707f300fec911b46.

Reason for revert: The signature of onWebRtcAudioTrackStartError was changed in this CL, which breaks internal projects that implement the WebRtcAudioTrackErrorCallback interface.

Original change's description:
> Improves native Android audio implementations.
> 
> Summary:
> 
> Adds AudioTrackStartErrorCode to separate different types of error
> codes in combination with StartPlayout.
> 
> Harmonizes WebRtcAudioRecord and WebRtcAudioTrack implementations
> to ensure that init/start/stop is performed identically.
> 
> Adds thread checking in WebRtcAudio track.
> 
> Bug: webrtc:8453
> Change-Id: Ic913e888ff9493c9cc748a7b4dae43eb6b37fa85
> Reviewed-on: https://webrtc-review.googlesource.com/15481
> Commit-Queue: Henrik Andreassson <henrika@webrtc.org>
> Reviewed-by: Alex Glaznev <glaznev@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#20448}

TBR=henrika@webrtc.org,glaznev@webrtc.org

Change-Id: If1d1d9717387a4a8f6d9d6acf7e86ded4c655b5e
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:8453
Reviewed-on: https://webrtc-review.googlesource.com/16321
Reviewed-by: Zhi Huang <zhihuang@webrtc.org>
Commit-Queue: Zhi Huang <zhihuang@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20452}
This commit is contained in:
Zhi Huang
2017-10-26 21:58:27 +00:00
committed by Commit Bot
parent c2a0eb2699
commit 0af34ad3fa
3 changed files with 47 additions and 90 deletions

View File

@ -61,7 +61,6 @@ import org.webrtc.voiceengine.WebRtcAudioRecord;
import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode; import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode;
import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback; import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback;
import org.webrtc.voiceengine.WebRtcAudioTrack; import org.webrtc.voiceengine.WebRtcAudioTrack;
import org.webrtc.voiceengine.WebRtcAudioTrack.AudioTrackStartErrorCode;
import org.webrtc.voiceengine.WebRtcAudioTrack.WebRtcAudioTrackErrorCallback; import org.webrtc.voiceengine.WebRtcAudioTrack.WebRtcAudioTrackErrorCallback;
import org.webrtc.voiceengine.WebRtcAudioUtils; import org.webrtc.voiceengine.WebRtcAudioUtils;
@ -496,20 +495,16 @@ public class PeerConnectionClient {
WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrackErrorCallback() { WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrackErrorCallback() {
@Override @Override
public void onWebRtcAudioTrackInitError(String errorMessage) { public void onWebRtcAudioTrackInitError(String errorMessage) {
Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
reportError(errorMessage); reportError(errorMessage);
} }
@Override @Override
public void onWebRtcAudioTrackStartError( public void onWebRtcAudioTrackStartError(String errorMessage) {
AudioTrackStartErrorCode errorCode, String errorMessage) {
Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
reportError(errorMessage); reportError(errorMessage);
} }
@Override @Override
public void onWebRtcAudioTrackError(String errorMessage) { public void onWebRtcAudioTrackError(String errorMessage) {
Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
reportError(errorMessage); reportError(errorMessage);
} }
}); });

View File

@ -128,20 +128,16 @@ public class WebRtcAudioRecord {
} }
} }
// Stops recording audio data.
if (audioRecord != null) {
Logging.d(TAG, "Calling AudioRecord.stop...");
try { try {
if (audioRecord != null) {
audioRecord.stop(); audioRecord.stop();
Logging.d(TAG, "AudioRecord.stop is done."); }
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage()); Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
} }
} }
} // Stops the inner thread loop and also calls AudioRecord.stop().
// Stops the inner thread loop which results in calling AudioRecord.stop().
// Does not block the calling thread. // Does not block the calling thread.
public void stopThread() { public void stopThread() {
Logging.d(TAG, "stopThread"); Logging.d(TAG, "stopThread");
@ -259,7 +255,6 @@ public class WebRtcAudioRecord {
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
"AudioRecord.startRecording failed: " + e.getMessage()); "AudioRecord.startRecording failed: " + e.getMessage());
releaseAudioResources();
return false; return false;
} }
@ -275,7 +270,6 @@ public class WebRtcAudioRecord {
AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
"AudioRecord.startRecording failed - incorrect state :" "AudioRecord.startRecording failed - incorrect state :"
+ audioRecord.getRecordingState()); + audioRecord.getRecordingState());
releaseAudioResources();
return false; return false;
} }
@ -292,13 +286,9 @@ public class WebRtcAudioRecord {
Logging.d(TAG, "stopRecording"); Logging.d(TAG, "stopRecording");
assertTrue(audioThread != null); assertTrue(audioThread != null);
audioThread.stopThread(); audioThread.stopThread();
Logging.d(TAG, "Stopping the AudioRecordThread...");
audioThread.interrupt();
if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
Logging.e(TAG, "Join of AudioRecordThread timed out."); Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
} }
Logging.d(TAG, "AudioRecordThread has now been stopped.");
audioThread = null; audioThread = null;
if (effects != null) { if (effects != null) {
effects.release(); effects.release();
@ -363,7 +353,6 @@ public class WebRtcAudioRecord {
// Releases the native AudioRecord resources. // Releases the native AudioRecord resources.
private void releaseAudioResources() { private void releaseAudioResources() {
Logging.d(TAG, "releaseAudioResources");
if (audioRecord != null) { if (audioRecord != null) {
audioRecord.release(); audioRecord.release();
audioRecord = null; audioRecord = null;

View File

@ -72,7 +72,6 @@ public class WebRtcAudioTrack {
private final long nativeAudioTrack; private final long nativeAudioTrack;
private final AudioManager audioManager; private final AudioManager audioManager;
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
private ByteBuffer byteBuffer; private ByteBuffer byteBuffer;
@ -84,15 +83,9 @@ public class WebRtcAudioTrack {
private static volatile boolean speakerMute = false; private static volatile boolean speakerMute = false;
private byte[] emptyBytes; private byte[] emptyBytes;
// Audio playout/track error handler functions.
public enum AudioTrackStartErrorCode {
AUDIO_TRACK_START_EXCEPTION,
AUDIO_TRACK_START_STATE_MISMATCH,
}
public static interface WebRtcAudioTrackErrorCallback { public static interface WebRtcAudioTrackErrorCallback {
void onWebRtcAudioTrackInitError(String errorMessage); void onWebRtcAudioTrackInitError(String errorMessage);
void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage); void onWebRtcAudioTrackStartError(String errorMessage);
void onWebRtcAudioTrackError(String errorMessage); void onWebRtcAudioTrackError(String errorMessage);
} }
@ -120,7 +113,26 @@ public class WebRtcAudioTrack {
public void run() { public void run() {
Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo()); Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
try {
// In MODE_STREAM mode we can optionally prime the output buffer by
// writing up to bufferSizeInBytes (from constructor) before starting.
// This priming will avoid an immediate underrun, but is not required.
// TODO(henrika): initial tests have shown that priming is not required.
audioTrack.play();
} catch (IllegalStateException e) {
reportWebRtcAudioTrackStartError("AudioTrack.play failed: " + e.getMessage());
releaseAudioResources();
return;
}
// We have seen reports that AudioTrack.play() can sometimes start in a
// paused mode (e.g. when application is in background mode).
// TODO(henrika): consider calling reportWebRtcAudioTrackStartError()
// and release audio resources here as well. For now, let the thread start
// and hope that the audio session can be restored later.
if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
Logging.w(TAG, "AudioTrack failed to enter playing state.");
}
// Fixed size in bytes of each 10ms block of audio data that we ask for // Fixed size in bytes of each 10ms block of audio data that we ask for
// using callbacks to the native WebRTC client. // using callbacks to the native WebRTC client.
@ -169,10 +181,10 @@ public class WebRtcAudioTrack {
// MODE_STREAM mode, audio will stop playing after the last buffer that // MODE_STREAM mode, audio will stop playing after the last buffer that
// was written has been played. // was written has been played.
if (audioTrack != null) { if (audioTrack != null) {
Logging.d(TAG, "Calling AudioTrack.stop..."); Logging.d(TAG, "Stopping the audio track...");
try { try {
audioTrack.stop(); audioTrack.stop();
Logging.d(TAG, "AudioTrack.stop is done."); Logging.d(TAG, "The audio track has now been stopped.");
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage()); Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
} }
@ -188,7 +200,7 @@ public class WebRtcAudioTrack {
return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes); return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
} }
// Stops the inner thread loop which results in calling AudioTrack.stop(). // Stops the inner thread loop and also calls AudioTrack.pause() and flush().
// Does not block the calling thread. // Does not block the calling thread.
public void stopThread() { public void stopThread() {
Logging.d(TAG, "stopThread"); Logging.d(TAG, "stopThread");
@ -197,7 +209,6 @@ public class WebRtcAudioTrack {
} }
WebRtcAudioTrack(long nativeAudioTrack) { WebRtcAudioTrack(long nativeAudioTrack) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
this.nativeAudioTrack = nativeAudioTrack; this.nativeAudioTrack = nativeAudioTrack;
audioManager = audioManager =
@ -208,7 +219,6 @@ public class WebRtcAudioTrack {
} }
private boolean initPlayout(int sampleRate, int channels) { private boolean initPlayout(int sampleRate, int channels) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")"); Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8); final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND)); byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
@ -280,66 +290,41 @@ public class WebRtcAudioTrack {
} }
private boolean startPlayout() { private boolean startPlayout() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "startPlayout"); Logging.d(TAG, "startPlayout");
assertTrue(audioTrack != null); assertTrue(audioTrack != null);
assertTrue(audioThread == null); assertTrue(audioThread == null);
if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
// Starts playing an audio track. reportWebRtcAudioTrackStartError("AudioTrack instance is not successfully initialized.");
try {
audioTrack.play();
} catch (IllegalStateException e) {
reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
"AudioTrack.play failed: " + e.getMessage());
releaseAudioResources();
return false; return false;
} }
// Verify the playout state up to two times (with a sleep in between)
// before returning false and reporting an error.
int numberOfStateChecks = 0;
while (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING &&
++numberOfStateChecks < 2) {
threadSleep(200);
}
if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
reportWebRtcAudioTrackStartError(
AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
"AudioTrack.play failed - incorrect state :"
+ audioTrack.getPlayState());
releaseAudioResources();
return false;
}
// Create and start new high-priority thread which calls AudioTrack.write()
// and where we also call the native nativeGetPlayoutData() callback to
// request decoded audio from WebRTC.
audioThread = new AudioTrackThread("AudioTrackJavaThread"); audioThread = new AudioTrackThread("AudioTrackJavaThread");
audioThread.start(); audioThread.start();
return true; return true;
} }
private boolean stopPlayout() { private boolean stopPlayout() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "stopPlayout"); Logging.d(TAG, "stopPlayout");
assertTrue(audioThread != null); assertTrue(audioThread != null);
logUnderrunCount(); logUnderrunCount();
audioThread.stopThread(); audioThread.stopThread();
final Thread aThread = audioThread;
audioThread = null;
if (aThread != null) {
Logging.d(TAG, "Stopping the AudioTrackThread..."); Logging.d(TAG, "Stopping the AudioTrackThread...");
audioThread.interrupt(); aThread.interrupt();
if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) { if (!ThreadUtils.joinUninterruptibly(aThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
Logging.e(TAG, "Join of AudioTrackThread timed out."); Logging.e(TAG, "Join of AudioTrackThread timed out.");
} }
Logging.d(TAG, "AudioTrackThread has now been stopped."); Logging.d(TAG, "AudioTrackThread has now been stopped.");
audioThread = null; }
releaseAudioResources(); releaseAudioResources();
return true; return true;
} }
// Get max possible volume index for a phone call audio stream. // Get max possible volume index for a phone call audio stream.
private int getStreamMaxVolume() { private int getStreamMaxVolume() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "getStreamMaxVolume"); Logging.d(TAG, "getStreamMaxVolume");
assertTrue(audioManager != null); assertTrue(audioManager != null);
return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL); return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
@ -347,7 +332,6 @@ public class WebRtcAudioTrack {
// Set current volume level for a phone call audio stream. // Set current volume level for a phone call audio stream.
private boolean setStreamVolume(int volume) { private boolean setStreamVolume(int volume) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "setStreamVolume(" + volume + ")"); Logging.d(TAG, "setStreamVolume(" + volume + ")");
assertTrue(audioManager != null); assertTrue(audioManager != null);
if (isVolumeFixed()) { if (isVolumeFixed()) {
@ -366,7 +350,6 @@ public class WebRtcAudioTrack {
/** Get current volume level for a phone call audio stream. */ /** Get current volume level for a phone call audio stream. */
private int getStreamVolume() { private int getStreamVolume() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "getStreamVolume"); Logging.d(TAG, "getStreamVolume");
assertTrue(audioManager != null); assertTrue(audioManager != null);
return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL); return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
@ -481,17 +464,16 @@ public class WebRtcAudioTrack {
} }
private void reportWebRtcAudioTrackInitError(String errorMessage) { private void reportWebRtcAudioTrackInitError(String errorMessage) {
Logging.e(TAG, "Init playout error: " + errorMessage); Logging.e(TAG, "Init error: " + errorMessage);
if (errorCallback != null) { if (errorCallback != null) {
errorCallback.onWebRtcAudioTrackInitError(errorMessage); errorCallback.onWebRtcAudioTrackInitError(errorMessage);
} }
} }
private void reportWebRtcAudioTrackStartError( private void reportWebRtcAudioTrackStartError(String errorMessage) {
AudioTrackStartErrorCode errorCode, String errorMessage) { Logging.e(TAG, "Start error: " + errorMessage);
Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage);
if (errorCallback != null) { if (errorCallback != null) {
errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage); errorCallback.onWebRtcAudioTrackStartError(errorMessage);
} }
} }
@ -502,13 +484,4 @@ public class WebRtcAudioTrack {
} }
} }
// Causes the currently executing thread to sleep for the specified number
// of milliseconds.
private void threadSleep(long millis) {
try {
Thread.sleep(millis);
} catch (InterruptedException e) {
Logging.e(TAG, "Thread.sleep failed: " + e.getMessage());
}
}
} }