Android AppRTCMobile: Use new audio device code

This CL contains some follow-up fixes for
https://webrtc-review.googlesource.com/c/src/+/60541. It removes all use
of the old voiceengine implementation from AppRTCMobile.

Bug: webrtc:7452
Change-Id: Iea21a4b3be1f3cbb5062831164fffb2c8051d858
Reviewed-on: https://webrtc-review.googlesource.com/63480
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Paulina Hensman <phensman@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22530}
Author: Magnus Jedvert
Date: 2018-03-21 11:39:29 +01:00
Committed by: Commit Bot
Commit: 08006d4133 (parent: 82fad3d513)
6 changed files with 59 additions and 57 deletions
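
At a glance, the change swaps the audio-sample hook from the old voiceengine class onto the new org.webrtc.audio.AudioDeviceModule wrapper. A minimal sketch of the call-site migration, reconstructed from the hunks below (the AudioSampleTap class name is illustrative, not part of the CL):

// Old (removed in this CL):
//   import org.webrtc.voiceengine.WebRtcAudioRecord;
//   WebRtcAudioRecord.setOnAudioSamplesReady(callback);
//
// New:
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.AudioDeviceModule.SamplesReadyCallback;

class AudioSampleTap {
  // Forwards a debug callback to the new audio device code.
  static void install(SamplesReadyCallback callback) {
    AudioDeviceModule.setOnAudioSamplesReady(callback);
  }
}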


@@ -108,7 +108,6 @@ if (is_android) {
   deps = [
     ":AppRTCMobile_resources",
-    "../modules/audio_device:audio_device_java",
     "../rtc_base:base_java",
     "../sdk/android:libjingle_peerconnection_java",
     "../sdk/android:libjingle_peerconnection_metrics_default_java",
@@ -830,7 +829,6 @@ if (is_android) {
   requires_android = true
   deps = [
     ":webrtc_unity_java",
-    "../modules/audio_device:audio_device_java",
     "../rtc_base:base_java",
     "../sdk/android:libjingle_peerconnection_java",
     "../sdk/android:libjingle_peerconnection_metrics_default_java",


@@ -19,15 +19,15 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.concurrent.ExecutorService;
-import org.webrtc.voiceengine.WebRtcAudioRecord;
-import org.webrtc.voiceengine.WebRtcAudioRecord.AudioSamples;
-import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback;
+import org.webrtc.audio.AudioDeviceModule;
+import org.webrtc.audio.AudioDeviceModule.AudioSamples;
+import org.webrtc.audio.AudioDeviceModule.SamplesReadyCallback;

 /**
- * Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes
+ * Implements the AudioRecordSamplesReadyCallback interface and writes
  * recorded raw audio samples to an output file.
  */
-public class RecordedAudioToFileController implements WebRtcAudioRecordSamplesReadyCallback {
+public class RecordedAudioToFileController implements SamplesReadyCallback {
   private static final String TAG = "RecordedAudioToFile";
   private static final long MAX_FILE_SIZE_IN_BYTES = 58348800L;
@@ -52,7 +52,7 @@ public class RecordedAudioToFileController implements WebRtcAudioRecordSamplesRe
       return false;
     }
     // Register this class as receiver of recorded audio samples for storage.
-    WebRtcAudioRecord.setOnAudioSamplesReady(this);
+    AudioDeviceModule.setOnAudioSamplesReady(this);
     return true;
   }
@@ -63,7 +63,7 @@ public class RecordedAudioToFileController implements WebRtcAudioRecordSamplesRe
   public void stop() {
     Log.d(TAG, "stop");
     // De-register this class as receiver of recorded audio samples for storage.
-    WebRtcAudioRecord.setOnAudioSamplesReady(null);
+    AudioDeviceModule.setOnAudioSamplesReady(null);
     synchronized (lock) {
       if (rawAudioFileOutputStream != null) {
         try {
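
For reference, any other debug consumer of recorded audio would follow the same register/deregister pattern as the controller above. A self-contained sketch (class name and logging are illustrative only, assuming the AudioDeviceModule API introduced later in this CL):

import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.AudioDeviceModule.AudioSamples;
import org.webrtc.audio.AudioDeviceModule.SamplesReadyCallback;

/** Illustrative debug tap that logs metadata for every recorded buffer. */
class AudioSampleLogger implements SamplesReadyCallback {
  public void start() {
    // Register this instance as receiver of recorded audio samples.
    AudioDeviceModule.setOnAudioSamplesReady(this);
  }

  public void stop() {
    // De-register so recorded buffers are no longer forwarded here.
    AudioDeviceModule.setOnAudioSamplesReady(null);
  }

  @Override
  public void onWebRtcAudioRecordSamplesReady(AudioSamples samples) {
    android.util.Log.d("AudioSampleLogger", samples.getSampleRate() + " Hz, "
        + samples.getChannelCount() + " ch, " + samples.getData().length + " bytes");
  }
}

As with RecordedAudioToFileController, only one callback can be registered at a time, since setOnAudioSamplesReady stores a single static reference.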


@@ -18,7 +18,6 @@ import android.preference.ListPreference;
 import android.preference.Preference;
 import org.webrtc.Camera2Enumerator;
 import org.webrtc.audio.AudioDeviceModule;
-import org.webrtc.voiceengine.WebRtcAudioUtils;

 /**
  * Settings activity for AppRTC.


@@ -45,10 +45,56 @@ public class AudioDeviceModule {
     void onWebRtcAudioRecordError(String errorMessage);
   }

+  /**
+   * Contains audio sample information.
+   */
+  public static class AudioSamples {
+    /** See {@link AudioRecord#getAudioFormat()} */
+    private final int audioFormat;
+    /** See {@link AudioRecord#getChannelCount()} */
+    private final int channelCount;
+    /** See {@link AudioRecord#getSampleRate()} */
+    private final int sampleRate;
+
+    private final byte[] data;
+
+    public AudioSamples(int audioFormat, int channelCount, int sampleRate, byte[] data) {
+      this.audioFormat = audioFormat;
+      this.channelCount = channelCount;
+      this.sampleRate = sampleRate;
+      this.data = data;
+    }
+
+    public int getAudioFormat() {
+      return audioFormat;
+    }
+
+    public int getChannelCount() {
+      return channelCount;
+    }
+
+    public int getSampleRate() {
+      return sampleRate;
+    }
+
+    public byte[] getData() {
+      return data;
+    }
+  }
+
+  /** Called when new audio samples are ready. This should only be set for debug purposes */
+  public static interface SamplesReadyCallback {
+    void onWebRtcAudioRecordSamplesReady(AudioSamples samples);
+  }
+
   public static void setErrorCallback(AudioRecordErrorCallback errorCallback) {
     WebRtcAudioRecord.setErrorCallback(errorCallback);
   }

+  public static void setOnAudioSamplesReady(SamplesReadyCallback callback) {
+    WebRtcAudioRecord.setOnAudioSamplesReady(callback);
+  }
+
   /* AudioTrack */

   // Audio playout/track error handler functions.
   public enum AudioTrackStartErrorCode {
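
Since AudioSamples is now a public value type with a public constructor, debug code outside the module can inspect buffers directly. A small sketch of interpreting the fields (assuming 16-bit PCM input, i.e. AudioFormat.ENCODING_PCM_16BIT; the SampleMath helper is hypothetical):

import android.media.AudioFormat;
import org.webrtc.audio.AudioDeviceModule.AudioSamples;

class SampleMath {
  /** Approximate duration of one callback buffer in milliseconds. */
  static double bufferDurationMs(AudioSamples samples) {
    // 16-bit PCM uses two bytes per sample; other encodings are not handled here.
    final int bytesPerSample =
        samples.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT ? 2 : 1;
    final int frames = samples.getData().length / (bytesPerSample * samples.getChannelCount());
    return 1000.0 * frames / samples.getSampleRate();
  }
}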


@@ -25,6 +25,7 @@ import org.webrtc.audio.AudioDeviceModule.AudioRecordErrorCallback;
 import org.webrtc.audio.AudioDeviceModule.AudioRecordStartErrorCode;
 import org.webrtc.CalledByNative;
 import org.webrtc.NativeClassQualifiedName;
+import org.webrtc.audio.AudioDeviceModule.SamplesReadyCallback;

 class WebRtcAudioRecord {
   private static final boolean DEBUG = false;
@@ -72,52 +73,9 @@ class WebRtcAudioRecord {
     WebRtcAudioRecord.errorCallback = errorCallback;
   }

-  /**
-   * Contains audio sample information. Object is passed using {@link
-   * WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback}
-   */
-  public static class AudioSamples {
-    /** See {@link AudioRecord#getAudioFormat()} */
-    private final int audioFormat;
-    /** See {@link AudioRecord#getChannelCount()} */
-    private final int channelCount;
-    /** See {@link AudioRecord#getSampleRate()} */
-    private final int sampleRate;
-
-    private final byte[] data;
-
-    private AudioSamples(AudioRecord audioRecord, byte[] data) {
-      this.audioFormat = audioRecord.getAudioFormat();
-      this.channelCount = audioRecord.getChannelCount();
-      this.sampleRate = audioRecord.getSampleRate();
-      this.data = data;
-    }
-
-    public int getAudioFormat() {
-      return audioFormat;
-    }
-
-    public int getChannelCount() {
-      return channelCount;
-    }
-
-    public int getSampleRate() {
-      return sampleRate;
-    }
-
-    public byte[] getData() {
-      return data;
-    }
-  }
-
-  /** Called when new audio samples are ready. This should only be set for debug purposes */
-  public static interface WebRtcAudioRecordSamplesReadyCallback {
-    void onWebRtcAudioRecordSamplesReady(AudioSamples samples);
-  }
-
-  private static WebRtcAudioRecordSamplesReadyCallback audioSamplesReadyCallback = null;
+  private static SamplesReadyCallback audioSamplesReadyCallback = null;

-  public static void setOnAudioSamplesReady(WebRtcAudioRecordSamplesReadyCallback callback) {
+  public static void setOnAudioSamplesReady(SamplesReadyCallback callback) {
     audioSamplesReadyCallback = callback;
   }
@@ -159,7 +117,8 @@ class WebRtcAudioRecord {
       // at index 0.
       byte[] data = Arrays.copyOf(byteBuffer.array(), byteBuffer.capacity());
       audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady(
-          new AudioSamples(audioRecord, data));
+          new AudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(),
+              audioRecord.getChannelCount(), audioRecord.getSampleRate(), data));
     }
   } else {
     String errorMessage = "AudioRecord.read failed: " + bytesRead;
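
Note the shape change at the construction site: the old AudioSamples constructor was private and read its values from the AudioRecord itself, while the new public constructor takes them as plain arguments, so the record thread now passes getAudioFormat(), getChannelCount() and getSampleRate() explicitly. If the same construction were needed elsewhere, it could be factored into a helper along these lines (purely illustrative; no such helper exists in this CL):

import android.media.AudioRecord;
import org.webrtc.audio.AudioDeviceModule;

class AudioSamplesFactory {
  /** Builds an AudioSamples value from a live AudioRecord plus a copied PCM buffer. */
  static AudioDeviceModule.AudioSamples fromAudioRecord(AudioRecord audioRecord, byte[] data) {
    return new AudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(),
        audioRecord.getChannelCount(), audioRecord.getSampleRate(), data);
  }
}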


@@ -37,7 +37,7 @@ enum SdkCode {
   SDK_CODE_N = 24,
 };

-// Utility class used to query the Java class (org/webrtc/voiceengine/BuildInfo)
+// Utility class used to query the Java class (org/webrtc/audio/BuildInfo)
 // for device and Android build information.
 // The calling thread is attached to the JVM at construction if needed and a
 // valid Java environment object is also created.