Adds timeout for audio record thread in Java layer
BUG=b/28448866
R=magjed@webrtc.org

Review URL: https://codereview.webrtc.org/1933123002 .

Cr-Commit-Position: refs/heads/master@{#12590}
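The fix relies on ThreadUtils.joinUninterruptibly(thread, timeoutMs), which waits for a thread to terminate for at most timeoutMs and returns whether it actually died. As a rough, self-contained illustration of what that kind of helper does (plain Java sketch, not the actual org.webrtc.ThreadUtils implementation):

    final class JoinWithTimeoutSketch {
      // Wait up to timeoutMs for `thread` to die without letting an interrupt cut the
      // wait short; any interrupt is remembered and re-asserted before returning.
      // Returns true if the thread is no longer alive when the wait ends.
      static boolean joinWithTimeout(Thread thread, long timeoutMs) {
        final long deadline = System.currentTimeMillis() + timeoutMs;
        boolean wasInterrupted = false;
        long remaining = timeoutMs;
        while (remaining > 0) {
          try {
            thread.join(remaining);
            break;  // join() returned: the thread died or the timeout elapsed.
          } catch (InterruptedException e) {
            wasInterrupted = true;  // Remember the interrupt and keep waiting.
            remaining = deadline - System.currentTimeMillis();
          }
        }
        if (wasInterrupted) {
          Thread.currentThread().interrupt();  // Pass the interrupt on to the caller.
        }
        return !thread.isAlive();
      }
    }

The point of such a helper is that an interrupt of the waiting thread neither aborts the wait nor gets lost, and the boolean result lets the caller log a timeout instead of blocking forever.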
@@ -21,6 +21,7 @@ import android.media.MediaRecorder.AudioSource;
 import android.os.Process;
 
 import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
 
 public class WebRtcAudioRecord {
   private static final boolean DEBUG = false;
@@ -42,6 +43,10 @@ public class WebRtcAudioRecord {
   // high load.
   private static final int BUFFER_SIZE_FACTOR = 2;
 
+  // The AudioRecordJavaThread is allowed to wait for a successful call to join()
+  // but the wait times out after this amount of time.
+  private static final long AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS = 2000;
+
   private final long nativeAudioRecord;
   private final Context context;
 
@@ -102,19 +107,15 @@ public class WebRtcAudioRecord {
       try {
         audioRecord.stop();
       } catch (IllegalStateException e) {
-        Logging.e(TAG,"AudioRecord.stop failed: " + e.getMessage());
+        Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
       }
     }
 
-    public void joinThread() {
+    // Stops the inner thread loop and also calls AudioRecord.stop().
+    // Does not block the calling thread.
+    public void stopThread() {
+      Logging.d(TAG, "stopThread");
       keepAlive = false;
-      while (isAlive()) {
-        try {
-          join();
-        } catch (InterruptedException e) {
-          // Ignore.
-        }
-      }
     }
   }
 
@@ -131,7 +132,7 @@ public class WebRtcAudioRecord {
   private boolean enableBuiltInAEC(boolean enable) {
     Logging.d(TAG, "enableBuiltInAEC(" + enable + ')');
     if (effects == null) {
-      Logging.e(TAG,"Built-in AEC is not supported on this platform");
+      Logging.e(TAG, "Built-in AEC is not supported on this platform");
       return false;
     }
     return effects.setAEC(enable);
@@ -140,7 +141,7 @@ public class WebRtcAudioRecord {
   private boolean enableBuiltInAGC(boolean enable) {
     Logging.d(TAG, "enableBuiltInAGC(" + enable + ')');
     if (effects == null) {
-      Logging.e(TAG,"Built-in AGC is not supported on this platform");
+      Logging.e(TAG, "Built-in AGC is not supported on this platform");
       return false;
     }
     return effects.setAGC(enable);
@@ -149,7 +150,7 @@ public class WebRtcAudioRecord {
   private boolean enableBuiltInNS(boolean enable) {
     Logging.d(TAG, "enableBuiltInNS(" + enable + ')');
     if (effects == null) {
-      Logging.e(TAG,"Built-in NS is not supported on this platform");
+      Logging.e(TAG, "Built-in NS is not supported on this platform");
       return false;
     }
     return effects.setNS(enable);
@@ -160,11 +161,11 @@ public class WebRtcAudioRecord {
         channels + ")");
     if (!WebRtcAudioUtils.hasPermission(
             context, android.Manifest.permission.RECORD_AUDIO)) {
-      Logging.e(TAG,"RECORD_AUDIO permission is missing");
+      Logging.e(TAG, "RECORD_AUDIO permission is missing");
       return -1;
     }
     if (audioRecord != null) {
-      Logging.e(TAG,"InitRecording() called twice without StopRecording()");
+      Logging.e(TAG, "InitRecording() called twice without StopRecording()");
       return -1;
     }
     final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
@@ -209,7 +210,7 @@ public class WebRtcAudioRecord {
     }
     if (audioRecord == null ||
         audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
-      Logging.e(TAG,"Failed to create a new AudioRecord instance");
+      Logging.e(TAG, "Failed to create a new AudioRecord instance");
       return -1;
     }
     Logging.d(TAG, "AudioRecord "
@@ -242,11 +243,11 @@ public class WebRtcAudioRecord {
     try {
       audioRecord.startRecording();
     } catch (IllegalStateException e) {
-      Logging.e(TAG,"AudioRecord.startRecording failed: " + e.getMessage());
+      Logging.e(TAG, "AudioRecord.startRecording failed: " + e.getMessage());
       return false;
     }
     if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
-      Logging.e(TAG,"AudioRecord.startRecording failed");
+      Logging.e(TAG, "AudioRecord.startRecording failed");
       return false;
     }
     audioThread = new AudioRecordThread("AudioRecordJavaThread");
@@ -257,7 +258,11 @@ public class WebRtcAudioRecord {
   private boolean stopRecording() {
     Logging.d(TAG, "stopRecording");
     assertTrue(audioThread != null);
-    audioThread.joinThread();
+    audioThread.stopThread();
+    if (!ThreadUtils.joinUninterruptibly(
+            audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
+      Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
+    }
     audioThread = null;
     if (effects != null) {
       effects.release();
@@ -281,7 +286,7 @@ public class WebRtcAudioRecord {
 
   // TODO(glaznev): remove this API once SW mic mute can use AudioTrack.setEnabled().
   public static void setMicrophoneMute(boolean mute) {
-    Logging.w(TAG,"setMicrophoneMute API will be deprecated soon.");
+    Logging.w(TAG, "setMicrophoneMute API will be deprecated soon.");
     microphoneMute = mute;
   }
 }
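Putting the hunks together, the shutdown pattern after this change amounts to roughly the sketch below (class and member names are illustrative, not the actual WebRtcAudioRecord code): the worker polls a volatile keepAlive flag, stopThread() only clears that flag, and the caller performs the bounded join and logs an error instead of hanging if the worker fails to exit in time.

    import org.webrtc.ThreadUtils;

    final class CaptureShutdownSketch {
      static final long JOIN_TIMEOUT_MS = 2000;  // Mirrors AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS.

      static class CaptureThread extends Thread {
        private volatile boolean keepAlive = true;

        @Override
        public void run() {
          while (keepAlive) {
            // Read one buffer from the recorder and hand it off (omitted in this sketch).
          }
          // Stop and release the recorder here (audioRecord.stop() in the real code).
        }

        // Asks the loop to exit; never blocks the calling thread.
        void stopThread() {
          keepAlive = false;
        }
      }

      // Caller side, mirroring the new stopRecording(): request the stop, then wait a
      // bounded amount of time and log instead of blocking forever if the worker never exits.
      static void stopCapture(CaptureThread thread) {
        thread.stopThread();
        if (!ThreadUtils.joinUninterruptibly(thread, JOIN_TIMEOUT_MS)) {
          System.err.println("Join of capture thread timed out");
        }
      }
    }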