Android: Generate audio JNI code

This CL only affects the forked Android audio device code. The old code
at webrtc/modules/audio_device/android/ is unaffected.

Bug: webrtc:8689, webrtc:8278
Change-Id: I696b8297baba9a0f657ea3df808f57ebf259cb06
Reviewed-on: https://webrtc-review.googlesource.com/36502
Reviewed-by: Paulina Hensman <phensman@webrtc.org>
Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22528}
This commit is contained in:
Magnus Jedvert
2018-03-20 19:11:19 +01:00
committed by Commit Bot
parent 37e36027e2
commit 8fc7948cc2
17 changed files with 221 additions and 342 deletions

View File

@@ -11,12 +11,14 @@
package org.webrtc.audio;
import android.os.Build;
import org.webrtc.CalledByNative;
public final class BuildInfo {
public static String getDevice() {
return Build.DEVICE;
}
@CalledByNative
public static String getDeviceModel() {
return Build.MODEL;
}
@@ -25,26 +27,32 @@ public final class BuildInfo {
return Build.PRODUCT;
}
@CalledByNative
public static String getBrand() {
return Build.BRAND;
}
@CalledByNative
public static String getDeviceManufacturer() {
return Build.MANUFACTURER;
}
@CalledByNative
public static String getAndroidBuildId() {
return Build.ID;
}
@CalledByNative
public static String getBuildType() {
return Build.TYPE;
}
@CalledByNative
public static String getBuildRelease() {
return Build.VERSION.RELEASE;
}
@CalledByNative
public static int getSdkVersion() {
return Build.VERSION.SDK_INT;
}

View File

@@ -22,6 +22,8 @@ import java.util.Timer;
import java.util.TimerTask;
import org.webrtc.ContextUtils;
import org.webrtc.Logging;
import org.webrtc.CalledByNative;
import org.webrtc.NativeClassQualifiedName;
// WebRtcAudioManager handles tasks that uses android.media.AudioManager.
// At construction, storeAudioParameters() is called and it retrieves
@@ -170,6 +172,7 @@ class WebRtcAudioManager {
private final VolumeLogger volumeLogger;
@CalledByNative
WebRtcAudioManager(long nativeAudioManager) {
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
this.nativeAudioManager = nativeAudioManager;
@@ -180,12 +183,13 @@ class WebRtcAudioManager {
}
volumeLogger = new VolumeLogger(audioManager);
storeAudioParameters();
nativeCacheAudioParameters(sampleRate, outputChannels, inputChannels, hardwareAEC, hardwareAGC,
hardwareNS, lowLatencyOutput, lowLatencyInput, proAudio, aAudio, outputBufferSize,
inputBufferSize, nativeAudioManager);
nativeCacheAudioParameters(nativeAudioManager, sampleRate, outputChannels, inputChannels,
hardwareAEC, hardwareAGC, hardwareNS, lowLatencyOutput, lowLatencyInput, proAudio, aAudio,
outputBufferSize, inputBufferSize);
WebRtcAudioUtils.logAudioState(TAG);
}
@CalledByNative
private boolean init() {
Logging.d(TAG, "init" + WebRtcAudioUtils.getThreadInfo());
if (initialized) {
@@ -197,6 +201,7 @@ class WebRtcAudioManager {
return true;
}
@CalledByNative
private void dispose() {
Logging.d(TAG, "dispose" + WebRtcAudioUtils.getThreadInfo());
if (!initialized) {
@@ -205,10 +210,12 @@ class WebRtcAudioManager {
volumeLogger.stop();
}
@CalledByNative
private boolean isCommunicationModeEnabled() {
return (audioManager.getMode() == AudioManager.MODE_IN_COMMUNICATION);
}
@CalledByNative
private boolean isDeviceBlacklistedForOpenSLESUsage() {
boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
? blacklistDeviceForOpenSLESUsage
@@ -376,8 +383,9 @@ class WebRtcAudioManager {
}
}
private native void nativeCacheAudioParameters(int sampleRate, int outputChannels,
int inputChannels, boolean hardwareAEC, boolean hardwareAGC, boolean hardwareNS,
boolean lowLatencyOutput, boolean lowLatencyInput, boolean proAudio, boolean aAudio,
int outputBufferSize, int inputBufferSize, long nativeAudioManager);
@NativeClassQualifiedName("webrtc::android_adm::AudioManager")
private native void nativeCacheAudioParameters(long nativeAudioManager, int sampleRate,
int outputChannels, int inputChannels, boolean hardwareAEC, boolean hardwareAGC,
boolean hardwareNS, boolean lowLatencyOutput, boolean lowLatencyInput, boolean proAudio,
boolean aAudio, int outputBufferSize, int inputBufferSize);
}

View File

@@ -23,6 +23,8 @@ import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import org.webrtc.audio.AudioDeviceModule.AudioRecordErrorCallback;
import org.webrtc.audio.AudioDeviceModule.AudioRecordStartErrorCode;
import org.webrtc.CalledByNative;
import org.webrtc.NativeClassQualifiedName;
class WebRtcAudioRecord {
private static final boolean DEBUG = false;
@@ -150,7 +152,7 @@ class WebRtcAudioRecord {
// failed to join this thread. To be a bit safer, try to avoid calling any native methods
// in case they've been unregistered after stopRecording() returned.
if (keepAlive) {
nativeDataIsRecorded(bytesRead, nativeAudioRecord);
nativeDataIsRecorded(nativeAudioRecord, bytesRead);
}
if (audioSamplesReadyCallback != null) {
// Copy the entire byte buffer array. Assume that the start of the byteBuffer is
@@ -192,6 +194,7 @@ class WebRtcAudioRecord {
}
}
@CalledByNative
WebRtcAudioRecord(long nativeAudioRecord) {
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
this.nativeAudioRecord = nativeAudioRecord;
@@ -201,6 +204,7 @@ class WebRtcAudioRecord {
effects = WebRtcAudioEffects.create();
}
@CalledByNative
private boolean enableBuiltInAEC(boolean enable) {
Logging.d(TAG, "enableBuiltInAEC(" + enable + ')');
if (effects == null) {
@@ -210,6 +214,7 @@ class WebRtcAudioRecord {
return effects.setAEC(enable);
}
@CalledByNative
private boolean enableBuiltInNS(boolean enable) {
Logging.d(TAG, "enableBuiltInNS(" + enable + ')');
if (effects == null) {
@@ -219,6 +224,7 @@ class WebRtcAudioRecord {
return effects.setNS(enable);
}
@CalledByNative
private int initRecording(int sampleRate, int channels) {
Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
if (audioRecord != null) {
@@ -233,7 +239,7 @@ class WebRtcAudioRecord {
// Rather than passing the ByteBuffer with every callback (requiring
// the potentially expensive GetDirectBufferAddress) we simply have the
// the native class cache the address to the memory once.
nativeCacheDirectBufferAddress(byteBuffer, nativeAudioRecord);
nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer);
// Get the minimum buffer size required for the successful creation of
// an AudioRecord object, in byte units.
@@ -273,6 +279,7 @@ class WebRtcAudioRecord {
return framesPerBuffer;
}
@CalledByNative
private boolean startRecording() {
Logging.d(TAG, "startRecording");
assertTrue(audioRecord != null);
@@ -295,6 +302,7 @@ class WebRtcAudioRecord {
return true;
}
@CalledByNative
private boolean stopRecording() {
Logging.d(TAG, "stopRecording");
assertTrue(audioThread != null);
@@ -340,9 +348,11 @@ class WebRtcAudioRecord {
return (channels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
}
private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
@NativeClassQualifiedName("webrtc::android_adm::AudioRecordJni")
private native void nativeCacheDirectBufferAddress(long nativeAudioRecord, ByteBuffer byteBuffer);
private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord);
@NativeClassQualifiedName("webrtc::android_adm::AudioRecordJni")
private native void nativeDataIsRecorded(long nativeAudioRecord, int bytes);
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized void setAudioSource(int source) {

View File

@@ -25,6 +25,8 @@ import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import org.webrtc.audio.AudioDeviceModule.AudioTrackErrorCallback;
import org.webrtc.audio.AudioDeviceModule.AudioTrackStartErrorCode;
import org.webrtc.CalledByNative;
import org.webrtc.NativeClassQualifiedName;
class WebRtcAudioTrack {
private static final boolean DEBUG = false;
@@ -122,7 +124,7 @@ class WebRtcAudioTrack {
// Get 10ms of PCM data from the native WebRTC client. Audio data is
// written into the common ByteBuffer using the address that was
// cached at construction.
nativeGetPlayoutData(sizeInBytes, nativeAudioTrack);
nativeGetPlayoutData(nativeAudioTrack, sizeInBytes);
// Write data until all data has been written to the audio sink.
// Upon return, the buffer position will have been advanced to reflect
// the amount of data that was successfully written to the AudioTrack.
@@ -188,6 +190,7 @@ class WebRtcAudioTrack {
}
}
@CalledByNative
WebRtcAudioTrack(long nativeAudioTrack) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
@@ -199,6 +202,7 @@ class WebRtcAudioTrack {
}
}
@CalledByNative
private boolean initPlayout(int sampleRate, int channels) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
@@ -209,7 +213,7 @@ class WebRtcAudioTrack {
// Rather than passing the ByteBuffer with every callback (requiring
// the potentially expensive GetDirectBufferAddress) we simply have the
// the native class cache the address to the memory once.
nativeCacheDirectBufferAddress(byteBuffer, nativeAudioTrack);
nativeCacheDirectBufferAddress(nativeAudioTrack, byteBuffer);
// Get the minimum buffer size required for the successful creation of an
// AudioTrack object to be created in the MODE_STREAM mode.
@@ -271,6 +275,7 @@ class WebRtcAudioTrack {
return true;
}
@CalledByNative
private boolean startPlayout() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "startPlayout");
@@ -301,6 +306,7 @@ class WebRtcAudioTrack {
return true;
}
@CalledByNative
private boolean stopPlayout() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "stopPlayout");
@@ -321,6 +327,7 @@ class WebRtcAudioTrack {
}
// Get max possible volume index for a phone call audio stream.
@CalledByNative
private int getStreamMaxVolume() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "getStreamMaxVolume");
@@ -329,6 +336,7 @@ class WebRtcAudioTrack {
}
// Set current volume level for a phone call audio stream.
@CalledByNative
private boolean setStreamVolume(int volume) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "setStreamVolume(" + volume + ")");
@@ -351,6 +359,7 @@ class WebRtcAudioTrack {
}
/** Get current volume level for a phone call audio stream. */
@CalledByNative
private int getStreamVolume() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "getStreamVolume");
@@ -447,9 +456,12 @@ class WebRtcAudioTrack {
return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
}
private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
@NativeClassQualifiedName("webrtc::android_adm::AudioTrackJni")
private static native void nativeCacheDirectBufferAddress(
long nativeAudioRecord, ByteBuffer byteBuffer);
private native void nativeGetPlayoutData(int bytes, long nativeAudioRecord);
@NativeClassQualifiedName("webrtc::android_adm::AudioTrackJni")
private static native void nativeGetPlayoutData(long nativeAudioRecord, int bytes);
// Sets all samples to be played out to zero if |mute| is true, i.e.,
// ensures that the speaker is muted.