Android: Generate audio JNI code
This CL only affects the forked Android audio device code. The old code
at webrtc/modules/audio_device/android/ is unaffected.

Bug: webrtc:8689, webrtc:8278
Change-Id: I696b8297baba9a0f657ea3df808f57ebf259cb06
Reviewed-on: https://webrtc-review.googlesource.com/36502
Reviewed-by: Paulina Hensman <phensman@webrtc.org>
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22528}
commit 8fc7948cc2
parent 37e36027e2
committed by Commit Bot
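The diff below replaces hand-written JNI plumbing with bindings produced by the generate_jni GN template: the Java methods gain @CalledByNative annotations, and the C++ side calls generated Java_<Class>_<method> stubs instead of caching jmethodIDs obtained through RegisterNatives. The following sketch only illustrates that before/after shape and is not code from this CL; the stub declaration stands in for what the generated WebRtcAudioManager_jni.h header is assumed to provide, and its exact signature is an assumption.

// Illustrative sketch only (not part of this CL).
#include <jni.h>

namespace webrtc {
namespace jni {
// Stand-in for what sdk/android/generated_audio_jni/jni/WebRtcAudioManager_jni.h
// is assumed to declare for the @CalledByNative method WebRtcAudioManager.init().
bool Java_WebRtcAudioManager_init(JNIEnv* env, jobject audio_manager);
}  // namespace jni
}  // namespace webrtc

// Before: look up a jmethodID by name and signature and invoke it reflectively
// (the "()Z" signature matches the removed GetMethodId("init", "()Z") below).
bool InitViaCachedMethodId(JNIEnv* env, jobject audio_manager) {
  jclass clazz = env->GetObjectClass(audio_manager);
  jmethodID init_method = env->GetMethodID(clazz, "init", "()Z");
  jboolean ok = env->CallBooleanMethod(audio_manager, init_method);
  env->DeleteLocalRef(clazz);
  return ok == JNI_TRUE;
}

// After: call the generated, strongly typed stub directly, as
// AudioManager::JavaAudioManager::Init() does further down in this diff.
bool InitViaGeneratedStub(JNIEnv* env, jobject audio_manager) {
  return webrtc::jni::Java_WebRtcAudioManager_init(env, audio_manager);
}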
@@ -195,6 +195,8 @@ rtc_source_set("audio_device_jni") {
   }
   deps = [
     ":base_jni",
+    ":generated_audio_jni",
+    ":native_api_jni",
     "../../api:array_view",
     "../../modules/audio_device:audio_device",
     "../../modules/audio_device:audio_device_buffer",
@@ -221,6 +223,17 @@ rtc_static_library("null_audio_jni") {
   ]
 }
 
+generate_jni("generated_audio_jni") {
+  sources = [
+    "src/java/org/webrtc/audio/BuildInfo.java",
+    "src/java/org/webrtc/audio/WebRtcAudioManager.java",
+    "src/java/org/webrtc/audio/WebRtcAudioRecord.java",
+    "src/java/org/webrtc/audio/WebRtcAudioTrack.java",
+  ]
+  jni_package = ""
+  jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+}
+
 generate_jni("generated_video_jni") {
   sources = [
     "api/org/webrtc/EncodedImage.java",
@@ -809,6 +822,7 @@ rtc_android_library("libjingle_peerconnection_java") {
     "src/java/org/webrtc/WrappedNativeI420Buffer.java",
     "src/java/org/webrtc/WrappedNativeVideoEncoder.java",
     "src/java/org/webrtc/WrappedNativeVideoDecoder.java",
+    "src/java/org/webrtc/audio/BuildInfo.java",
     "src/java/org/webrtc/audio/WebRtcAudioEffects.java",
     "src/java/org/webrtc/audio/WebRtcAudioManager.java",
     "src/java/org/webrtc/audio/WebRtcAudioRecord.java",

@@ -21,7 +21,7 @@ import java.lang.annotation.Target;
  */
 @Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
 @Retention(RetentionPolicy.CLASS)
-@interface CalledByNative {
+public @interface CalledByNative {
   /*
    * If present, tells which inner class the method belongs to.
    */

@@ -11,12 +11,14 @@
 package org.webrtc.audio;
 
 import android.os.Build;
+import org.webrtc.CalledByNative;
 
 public final class BuildInfo {
   public static String getDevice() {
     return Build.DEVICE;
   }
 
+  @CalledByNative
   public static String getDeviceModel() {
     return Build.MODEL;
   }
@@ -25,26 +27,32 @@ public final class BuildInfo {
     return Build.PRODUCT;
   }
 
+  @CalledByNative
   public static String getBrand() {
     return Build.BRAND;
   }
 
+  @CalledByNative
   public static String getDeviceManufacturer() {
     return Build.MANUFACTURER;
   }
 
+  @CalledByNative
   public static String getAndroidBuildId() {
     return Build.ID;
   }
 
+  @CalledByNative
   public static String getBuildType() {
     return Build.TYPE;
   }
 
+  @CalledByNative
   public static String getBuildRelease() {
     return Build.VERSION.RELEASE;
   }
 
+  @CalledByNative
   public static int getSdkVersion() {
     return Build.VERSION.SDK_INT;
   }

@@ -22,6 +22,8 @@ import java.util.Timer;
 import java.util.TimerTask;
 import org.webrtc.ContextUtils;
 import org.webrtc.Logging;
+import org.webrtc.CalledByNative;
+import org.webrtc.NativeClassQualifiedName;
 
 // WebRtcAudioManager handles tasks that uses android.media.AudioManager.
 // At construction, storeAudioParameters() is called and it retrieves
@@ -170,6 +172,7 @@ class WebRtcAudioManager {
 
   private final VolumeLogger volumeLogger;
 
+  @CalledByNative
   WebRtcAudioManager(long nativeAudioManager) {
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
     this.nativeAudioManager = nativeAudioManager;
@@ -180,12 +183,13 @@ class WebRtcAudioManager {
     }
     volumeLogger = new VolumeLogger(audioManager);
     storeAudioParameters();
-    nativeCacheAudioParameters(sampleRate, outputChannels, inputChannels, hardwareAEC, hardwareAGC,
-        hardwareNS, lowLatencyOutput, lowLatencyInput, proAudio, aAudio, outputBufferSize,
-        inputBufferSize, nativeAudioManager);
+    nativeCacheAudioParameters(nativeAudioManager, sampleRate, outputChannels, inputChannels,
+        hardwareAEC, hardwareAGC, hardwareNS, lowLatencyOutput, lowLatencyInput, proAudio, aAudio,
+        outputBufferSize, inputBufferSize);
     WebRtcAudioUtils.logAudioState(TAG);
   }
 
+  @CalledByNative
   private boolean init() {
     Logging.d(TAG, "init" + WebRtcAudioUtils.getThreadInfo());
     if (initialized) {
@@ -197,6 +201,7 @@ class WebRtcAudioManager {
     return true;
   }
 
+  @CalledByNative
   private void dispose() {
     Logging.d(TAG, "dispose" + WebRtcAudioUtils.getThreadInfo());
     if (!initialized) {
@@ -205,10 +210,12 @@ class WebRtcAudioManager {
     volumeLogger.stop();
   }
 
+  @CalledByNative
   private boolean isCommunicationModeEnabled() {
     return (audioManager.getMode() == AudioManager.MODE_IN_COMMUNICATION);
   }
 
+  @CalledByNative
   private boolean isDeviceBlacklistedForOpenSLESUsage() {
     boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
         ? blacklistDeviceForOpenSLESUsage
@@ -376,8 +383,9 @@ class WebRtcAudioManager {
     }
   }
 
-  private native void nativeCacheAudioParameters(int sampleRate, int outputChannels,
-      int inputChannels, boolean hardwareAEC, boolean hardwareAGC, boolean hardwareNS,
-      boolean lowLatencyOutput, boolean lowLatencyInput, boolean proAudio, boolean aAudio,
-      int outputBufferSize, int inputBufferSize, long nativeAudioManager);
+  @NativeClassQualifiedName("webrtc::android_adm::AudioManager")
+  private native void nativeCacheAudioParameters(long nativeAudioManager, int sampleRate,
+      int outputChannels, int inputChannels, boolean hardwareAEC, boolean hardwareAGC,
+      boolean hardwareNS, boolean lowLatencyOutput, boolean lowLatencyInput, boolean proAudio,
+      boolean aAudio, int outputBufferSize, int inputBufferSize);
 }

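In the hunks above, the native methods move the long nativeAudioManager handle to the first parameter and gain @NativeClassQualifiedName("webrtc::android_adm::AudioManager"); the annotation tells the JNI generator which C++ class the handle points at, so the generated trampoline can cast it and forward to a member function -- the same cast-and-forward that the hand-written CacheAudioParameters / DataIsRecorded wrappers removed later in this diff did manually. A minimal sketch of that dispatch shape, with a trimmed, hypothetical parameter list (not the generated code itself):

// Sketch of the cast-and-forward dispatch implied by @NativeClassQualifiedName.
// Hypothetical, trimmed signature; the real generated stub covers the full
// parameter list of nativeCacheAudioParameters.
#include <jni.h>

namespace webrtc {
namespace android_adm {

class AudioManager {
 public:
  // Trimmed stand-in for the real member that caches the Java-side values.
  void CacheAudioParameters(JNIEnv* env, jint sample_rate) {
    cached_sample_rate_ = sample_rate;
  }

 private:
  jint cached_sample_rate_ = 0;
};

}  // namespace android_adm
}  // namespace webrtc

// Generated-style trampoline: the leading jlong is the native object pointer,
// so the stub casts it back and forwards the call to the member function.
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_audio_WebRtcAudioManager_nativeCacheAudioParameters(
    JNIEnv* env, jobject /*j_caller*/, jlong native_audio_manager, jint sample_rate) {
  reinterpret_cast<webrtc::android_adm::AudioManager*>(native_audio_manager)
      ->CacheAudioParameters(env, sample_rate);
}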
@@ -23,6 +23,8 @@ import org.webrtc.Logging;
 import org.webrtc.ThreadUtils;
 import org.webrtc.audio.AudioDeviceModule.AudioRecordErrorCallback;
 import org.webrtc.audio.AudioDeviceModule.AudioRecordStartErrorCode;
+import org.webrtc.CalledByNative;
+import org.webrtc.NativeClassQualifiedName;
 
 class WebRtcAudioRecord {
   private static final boolean DEBUG = false;
@@ -150,7 +152,7 @@ class WebRtcAudioRecord {
       // failed to join this thread. To be a bit safer, try to avoid calling any native methods
       // in case they've been unregistered after stopRecording() returned.
       if (keepAlive) {
-        nativeDataIsRecorded(bytesRead, nativeAudioRecord);
+        nativeDataIsRecorded(nativeAudioRecord, bytesRead);
       }
       if (audioSamplesReadyCallback != null) {
         // Copy the entire byte buffer array. Assume that the start of the byteBuffer is
@@ -192,6 +194,7 @@ class WebRtcAudioRecord {
     }
   }
 
+  @CalledByNative
   WebRtcAudioRecord(long nativeAudioRecord) {
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
     this.nativeAudioRecord = nativeAudioRecord;
@@ -201,6 +204,7 @@ class WebRtcAudioRecord {
     effects = WebRtcAudioEffects.create();
   }
 
+  @CalledByNative
   private boolean enableBuiltInAEC(boolean enable) {
     Logging.d(TAG, "enableBuiltInAEC(" + enable + ')');
     if (effects == null) {
@@ -210,6 +214,7 @@ class WebRtcAudioRecord {
     return effects.setAEC(enable);
   }
 
+  @CalledByNative
   private boolean enableBuiltInNS(boolean enable) {
     Logging.d(TAG, "enableBuiltInNS(" + enable + ')');
     if (effects == null) {
@@ -219,6 +224,7 @@ class WebRtcAudioRecord {
     return effects.setNS(enable);
   }
 
+  @CalledByNative
   private int initRecording(int sampleRate, int channels) {
     Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
     if (audioRecord != null) {
@@ -233,7 +239,7 @@ class WebRtcAudioRecord {
     // Rather than passing the ByteBuffer with every callback (requiring
     // the potentially expensive GetDirectBufferAddress) we simply have the
     // the native class cache the address to the memory once.
-    nativeCacheDirectBufferAddress(byteBuffer, nativeAudioRecord);
+    nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer);
 
     // Get the minimum buffer size required for the successful creation of
     // an AudioRecord object, in byte units.
@@ -273,6 +279,7 @@ class WebRtcAudioRecord {
     return framesPerBuffer;
   }
 
+  @CalledByNative
   private boolean startRecording() {
     Logging.d(TAG, "startRecording");
     assertTrue(audioRecord != null);
@@ -295,6 +302,7 @@ class WebRtcAudioRecord {
     return true;
   }
 
+  @CalledByNative
   private boolean stopRecording() {
     Logging.d(TAG, "stopRecording");
     assertTrue(audioThread != null);
@@ -340,9 +348,11 @@ class WebRtcAudioRecord {
     return (channels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
   }
 
-  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
+  @NativeClassQualifiedName("webrtc::android_adm::AudioRecordJni")
+  private native void nativeCacheDirectBufferAddress(long nativeAudioRecord, ByteBuffer byteBuffer);
 
-  private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord);
+  @NativeClassQualifiedName("webrtc::android_adm::AudioRecordJni")
+  private native void nativeDataIsRecorded(long nativeAudioRecord, int bytes);
 
   @SuppressWarnings("NoSynchronizedMethodCheck")
   public static synchronized void setAudioSource(int source) {

@@ -25,6 +25,8 @@ import org.webrtc.Logging;
 import org.webrtc.ThreadUtils;
 import org.webrtc.audio.AudioDeviceModule.AudioTrackErrorCallback;
 import org.webrtc.audio.AudioDeviceModule.AudioTrackStartErrorCode;
+import org.webrtc.CalledByNative;
+import org.webrtc.NativeClassQualifiedName;
 
 class WebRtcAudioTrack {
   private static final boolean DEBUG = false;
@@ -122,7 +124,7 @@ class WebRtcAudioTrack {
         // Get 10ms of PCM data from the native WebRTC client. Audio data is
         // written into the common ByteBuffer using the address that was
         // cached at construction.
-        nativeGetPlayoutData(sizeInBytes, nativeAudioTrack);
+        nativeGetPlayoutData(nativeAudioTrack, sizeInBytes);
         // Write data until all data has been written to the audio sink.
         // Upon return, the buffer position will have been advanced to reflect
         // the amount of data that was successfully written to the AudioTrack.
@@ -188,6 +190,7 @@ class WebRtcAudioTrack {
     }
   }
 
+  @CalledByNative
   WebRtcAudioTrack(long nativeAudioTrack) {
     threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
@@ -199,6 +202,7 @@ class WebRtcAudioTrack {
     }
   }
 
+  @CalledByNative
   private boolean initPlayout(int sampleRate, int channels) {
     threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
@@ -209,7 +213,7 @@ class WebRtcAudioTrack {
     // Rather than passing the ByteBuffer with every callback (requiring
     // the potentially expensive GetDirectBufferAddress) we simply have the
     // the native class cache the address to the memory once.
-    nativeCacheDirectBufferAddress(byteBuffer, nativeAudioTrack);
+    nativeCacheDirectBufferAddress(nativeAudioTrack, byteBuffer);
 
     // Get the minimum buffer size required for the successful creation of an
     // AudioTrack object to be created in the MODE_STREAM mode.
@@ -271,6 +275,7 @@ class WebRtcAudioTrack {
     return true;
   }
 
+  @CalledByNative
   private boolean startPlayout() {
     threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "startPlayout");
@@ -301,6 +306,7 @@ class WebRtcAudioTrack {
     return true;
   }
 
+  @CalledByNative
   private boolean stopPlayout() {
     threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "stopPlayout");
@@ -321,6 +327,7 @@ class WebRtcAudioTrack {
   }
 
   // Get max possible volume index for a phone call audio stream.
+  @CalledByNative
   private int getStreamMaxVolume() {
     threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "getStreamMaxVolume");
@@ -329,6 +336,7 @@ class WebRtcAudioTrack {
   }
 
   // Set current volume level for a phone call audio stream.
+  @CalledByNative
   private boolean setStreamVolume(int volume) {
     threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "setStreamVolume(" + volume + ")");
@@ -351,6 +359,7 @@ class WebRtcAudioTrack {
   }
 
   /** Get current volume level for a phone call audio stream. */
+  @CalledByNative
   private int getStreamVolume() {
     threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "getStreamVolume");
@@ -447,9 +456,12 @@ class WebRtcAudioTrack {
     return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
   }
 
-  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
+  @NativeClassQualifiedName("webrtc::android_adm::AudioTrackJni")
+  private static native void nativeCacheDirectBufferAddress(
+      long nativeAudioRecord, ByteBuffer byteBuffer);
 
-  private native void nativeGetPlayoutData(int bytes, long nativeAudioRecord);
+  @NativeClassQualifiedName("webrtc::android_adm::AudioTrackJni")
+  private static native void nativeGetPlayoutData(long nativeAudioRecord, int bytes);
 
   // Sets all samples to be played out to zero if |mute| is true, i.e.,
   // ensures that the speaker is muted.

@@ -1,6 +1,4 @@
 include_rules = [
   "+base/android/jni_android.h",
   "+modules/audio_device",
-  # TODO(bugs.webrtc.org/8689): Remove this dependency and use jni generation instead.
-  "+modules/utility/include/helpers_android.h",
 ]

@@ -12,12 +12,13 @@
 
 #include <utility>
 
-#include "modules/utility/include/helpers_android.h"
 #include "rtc_base/arraysize.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/logging.h"
 #include "rtc_base/platform_thread.h"
+#include "sdk/android/generated_audio_jni/jni/WebRtcAudioManager_jni.h"
 #include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/jni_helpers.h"
 
 namespace webrtc {
 
@@ -25,16 +26,8 @@ namespace android_adm {
 
 // AudioManager::JavaAudioManager implementation
 AudioManager::JavaAudioManager::JavaAudioManager(
-    NativeRegistration* native_reg,
-    std::unique_ptr<GlobalRef> audio_manager)
-    : audio_manager_(std::move(audio_manager)),
-      init_(native_reg->GetMethodId("init", "()Z")),
-      dispose_(native_reg->GetMethodId("dispose", "()V")),
-      is_communication_mode_enabled_(
-          native_reg->GetMethodId("isCommunicationModeEnabled", "()Z")),
-      is_device_blacklisted_for_open_sles_usage_(
-          native_reg->GetMethodId("isDeviceBlacklistedForOpenSLESUsage",
-                                  "()Z")) {
+    const ScopedJavaLocalRef<jobject>& audio_manager)
+    : env_(audio_manager.env()), audio_manager_(audio_manager) {
   RTC_LOG(INFO) << "JavaAudioManager::ctor";
 }
 
@@ -43,26 +36,30 @@ AudioManager::JavaAudioManager::~JavaAudioManager() {
 }
 
 bool AudioManager::JavaAudioManager::Init() {
-  return audio_manager_->CallBooleanMethod(init_);
+  thread_checker_.CalledOnValidThread();
+  return Java_WebRtcAudioManager_init(env_, audio_manager_);
 }
 
 void AudioManager::JavaAudioManager::Close() {
-  audio_manager_->CallVoidMethod(dispose_);
+  thread_checker_.CalledOnValidThread();
+  Java_WebRtcAudioManager_dispose(env_, audio_manager_);
 }
 
 bool AudioManager::JavaAudioManager::IsCommunicationModeEnabled() {
-  return audio_manager_->CallBooleanMethod(is_communication_mode_enabled_);
+  thread_checker_.CalledOnValidThread();
+  return Java_WebRtcAudioManager_isCommunicationModeEnabled(env_,
+                                                            audio_manager_);
 }
 
 bool AudioManager::JavaAudioManager::IsDeviceBlacklistedForOpenSLESUsage() {
-  return audio_manager_->CallBooleanMethod(
-      is_device_blacklisted_for_open_sles_usage_);
+  thread_checker_.CalledOnValidThread();
+  return Java_WebRtcAudioManager_isDeviceBlacklistedForOpenSLESUsage(
+      env_, audio_manager_);
 }
 
 // AudioManager implementation
 AudioManager::AudioManager()
-    : j_environment_(JVM::GetInstance()->environment()),
-      audio_layer_(AudioDeviceModule::kPlatformDefaultAudio),
+    : audio_layer_(AudioDeviceModule::kPlatformDefaultAudio),
       initialized_(false),
       hardware_aec_(false),
       hardware_agc_(false),
@@ -71,17 +68,9 @@ AudioManager::AudioManager()
       low_latency_record_(false),
       delay_estimate_in_milliseconds_(0) {
   RTC_LOG(INFO) << "ctor";
-  RTC_CHECK(j_environment_);
-  JNINativeMethod native_methods[] = {
-      {"nativeCacheAudioParameters", "(IIIZZZZZZZIIJ)V",
-       reinterpret_cast<void*>(&AudioManager::CacheAudioParameters)}};
-  j_native_registration_ = j_environment_->RegisterNatives(
-      "org/webrtc/voiceengine/WebRtcAudioManager", native_methods,
-      arraysize(native_methods));
   j_audio_manager_.reset(
-      new JavaAudioManager(j_native_registration_.get(),
-                           j_native_registration_->NewObject(
-                               "<init>", "(J)V", PointerTojlong(this))));
+      new JavaAudioManager(Java_WebRtcAudioManager_Constructor(
+          AttachCurrentThreadIfNeeded(), jni::jlongFromPointer(this))));
 }
 
 AudioManager::~AudioManager() {
@@ -238,30 +227,8 @@ int AudioManager::GetDelayEstimateInMilliseconds() const {
   return delay_estimate_in_milliseconds_;
 }
 
-void JNICALL AudioManager::CacheAudioParameters(JNIEnv* env,
-                                                jobject obj,
-                                                jint sample_rate,
-                                                jint output_channels,
-                                                jint input_channels,
-                                                jboolean hardware_aec,
-                                                jboolean hardware_agc,
-                                                jboolean hardware_ns,
-                                                jboolean low_latency_output,
-                                                jboolean low_latency_input,
-                                                jboolean pro_audio,
-                                                jboolean a_audio,
-                                                jint output_buffer_size,
-                                                jint input_buffer_size,
-                                                jlong native_audio_manager) {
-  AudioManager* this_object =
-      reinterpret_cast<AudioManager*>(native_audio_manager);
-  this_object->OnCacheAudioParameters(
-      env, sample_rate, output_channels, input_channels, hardware_aec,
-      hardware_agc, hardware_ns, low_latency_output, low_latency_input,
-      pro_audio, a_audio, output_buffer_size, input_buffer_size);
-}
-
-void AudioManager::OnCacheAudioParameters(JNIEnv* env,
+void AudioManager::CacheAudioParameters(JNIEnv* env,
+                                        const JavaParamRef<jobject>& j_caller,
                                         jint sample_rate,
                                         jint output_channels,
                                         jint input_channels,

@@ -17,9 +17,8 @@
 
 #include "modules/audio_device/include/audio_device.h"
 #include "modules/audio_device/include/audio_device_defines.h"
-#include "modules/utility/include/helpers_android.h"
-#include "modules/utility/include/jvm_android.h"
 #include "rtc_base/thread_checker.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
 #include "sdk/android/src/jni/audio_device/audio_common.h"
 #include "sdk/android/src/jni/audio_device/opensles_common.h"
 
@@ -41,8 +40,7 @@ class AudioManager {
   // parts that are associated with this call.
   class JavaAudioManager {
    public:
-    JavaAudioManager(NativeRegistration* native_registration,
-                     std::unique_ptr<GlobalRef> audio_manager);
+    explicit JavaAudioManager(const ScopedJavaLocalRef<jobject>& audio_manager);
     ~JavaAudioManager();
 
     bool Init();
@@ -51,11 +49,9 @@ class AudioManager {
     bool IsDeviceBlacklistedForOpenSLESUsage();
 
    private:
-    std::unique_ptr<GlobalRef> audio_manager_;
-    jmethodID init_;
-    jmethodID dispose_;
-    jmethodID is_communication_mode_enabled_;
-    jmethodID is_device_blacklisted_for_open_sles_usage_;
+    JNIEnv* const env_;
+    rtc::ThreadChecker thread_checker_;
+    ScopedJavaGlobalRef<jobject> audio_manager_;
   };
 
   AudioManager();
@@ -124,26 +120,11 @@ class AudioManager {
   // webrtc::kHighLatencyModeDelayEstimateInMilliseconds.
   int GetDelayEstimateInMilliseconds() const;
 
- private:
   // Called from Java side so we can cache the native audio parameters.
   // This method will be called by the WebRtcAudioManager constructor, i.e.
   // on the same thread that this object is created on.
-  static void JNICALL CacheAudioParameters(JNIEnv* env,
-                                           jobject obj,
-                                           jint sample_rate,
-                                           jint output_channels,
-                                           jint input_channels,
-                                           jboolean hardware_aec,
-                                           jboolean hardware_agc,
-                                           jboolean hardware_ns,
-                                           jboolean low_latency_output,
-                                           jboolean low_latency_input,
-                                           jboolean pro_audio,
-                                           jboolean a_audio,
-                                           jint output_buffer_size,
-                                           jint input_buffer_size,
-                                           jlong native_audio_manager);
-  void OnCacheAudioParameters(JNIEnv* env,
+  void CacheAudioParameters(JNIEnv* env,
+                            const JavaParamRef<jobject>& j_caller,
                             jint sample_rate,
                             jint output_channels,
                             jint input_channels,
@@ -157,21 +138,12 @@ class AudioManager {
                             jint output_buffer_size,
                             jint input_buffer_size);
 
+ private:
   // Stores thread ID in the constructor.
   // We can then use ThreadChecker::CalledOnValidThread() to ensure that
   // other methods are called from the same thread.
   rtc::ThreadChecker thread_checker_;
 
-  // Calls AttachCurrentThread() if this thread is not attached at construction.
-  // Also ensures that DetachCurrentThread() is called at destruction.
-  AttachCurrentThreadIfNeeded attach_thread_if_needed_;
-
-  // Wraps the JNI interface pointer and methods associated with it.
-  std::unique_ptr<JNIEnvironment> j_environment_;
-
-  // Contains factory method for creating the Java object.
-  std::unique_ptr<NativeRegistration> j_native_registration_;
-
   // Wraps the Java specific parts of the AudioManager.
   std::unique_ptr<AudioManager::JavaAudioManager> j_audio_manager_;
 

@@ -19,7 +19,9 @@
 #include "rtc_base/logging.h"
 #include "rtc_base/platform_thread.h"
 #include "rtc_base/timeutils.h"
+#include "sdk/android/generated_audio_jni/jni/WebRtcAudioRecord_jni.h"
 #include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/jni_helpers.h"
 #include "system_wrappers/include/metrics.h"
 
 namespace webrtc {
@@ -47,46 +49,44 @@ class ScopedHistogramTimer {
 
 // AudioRecordJni::JavaAudioRecord implementation.
 AudioRecordJni::JavaAudioRecord::JavaAudioRecord(
-    NativeRegistration* native_reg,
-    std::unique_ptr<GlobalRef> audio_record)
-    : audio_record_(std::move(audio_record)),
-      init_recording_(native_reg->GetMethodId("initRecording", "(II)I")),
-      start_recording_(native_reg->GetMethodId("startRecording", "()Z")),
-      stop_recording_(native_reg->GetMethodId("stopRecording", "()Z")),
-      enable_built_in_aec_(native_reg->GetMethodId("enableBuiltInAEC", "(Z)Z")),
-      enable_built_in_ns_(native_reg->GetMethodId("enableBuiltInNS", "(Z)Z")) {}
+    const ScopedJavaLocalRef<jobject>& audio_record)
+    : env_(audio_record.env()), audio_record_(audio_record) {}
 
 AudioRecordJni::JavaAudioRecord::~JavaAudioRecord() {}
 
 int AudioRecordJni::JavaAudioRecord::InitRecording(int sample_rate,
                                                    size_t channels) {
-  return audio_record_->CallIntMethod(init_recording_,
+  thread_checker_.CalledOnValidThread();
+  return Java_WebRtcAudioRecord_initRecording(env_, audio_record_,
                                               static_cast<jint>(sample_rate),
                                               static_cast<jint>(channels));
 }
 
 bool AudioRecordJni::JavaAudioRecord::StartRecording() {
-  return audio_record_->CallBooleanMethod(start_recording_);
+  thread_checker_.CalledOnValidThread();
+  return Java_WebRtcAudioRecord_startRecording(env_, audio_record_);
 }
 
 bool AudioRecordJni::JavaAudioRecord::StopRecording() {
-  return audio_record_->CallBooleanMethod(stop_recording_);
+  thread_checker_.CalledOnValidThread();
+  return Java_WebRtcAudioRecord_stopRecording(env_, audio_record_);
 }
 
 bool AudioRecordJni::JavaAudioRecord::EnableBuiltInAEC(bool enable) {
-  return audio_record_->CallBooleanMethod(enable_built_in_aec_,
+  thread_checker_.CalledOnValidThread();
+  return Java_WebRtcAudioRecord_enableBuiltInAEC(env_, audio_record_,
                                                  static_cast<jboolean>(enable));
 }
 
 bool AudioRecordJni::JavaAudioRecord::EnableBuiltInNS(bool enable) {
-  return audio_record_->CallBooleanMethod(enable_built_in_ns_,
+  thread_checker_.CalledOnValidThread();
+  return Java_WebRtcAudioRecord_enableBuiltInNS(env_, audio_record_,
                                                 static_cast<jboolean>(enable));
 }
 
 // AudioRecordJni implementation.
 AudioRecordJni::AudioRecordJni(AudioManager* audio_manager)
-    : j_environment_(JVM::GetInstance()->environment()),
-      audio_manager_(audio_manager),
+    : audio_manager_(audio_manager),
       audio_parameters_(audio_manager->GetRecordAudioParameters()),
       total_delay_in_milliseconds_(0),
       direct_buffer_address_(nullptr),
@@ -97,19 +97,8 @@ AudioRecordJni::AudioRecordJni(AudioManager* audio_manager)
       audio_device_buffer_(nullptr) {
   RTC_LOG(INFO) << "ctor";
   RTC_DCHECK(audio_parameters_.is_valid());
-  RTC_CHECK(j_environment_);
-  JNINativeMethod native_methods[] = {
-      {"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V",
-       reinterpret_cast<void*>(&AudioRecordJni::CacheDirectBufferAddress)},
-      {"nativeDataIsRecorded", "(IJ)V",
-       reinterpret_cast<void*>(&AudioRecordJni::DataIsRecorded)}};
-  j_native_registration_ = j_environment_->RegisterNatives(
-      "org/webrtc/voiceengine/WebRtcAudioRecord", native_methods,
-      arraysize(native_methods));
-  j_audio_record_.reset(
-      new JavaAudioRecord(j_native_registration_.get(),
-                          j_native_registration_->NewObject(
-                              "<init>", "(J)V", PointerTojlong(this))));
+  j_audio_record_.reset(new JavaAudioRecord(Java_WebRtcAudioRecord_Constructor(
+      AttachCurrentThreadIfNeeded(), jni::jlongFromPointer(this))));
   // Detach from this thread since we want to use the checker to verify calls
   // from the Java based audio thread.
   thread_checker_java_.DetachFromThread();
@@ -230,38 +219,24 @@ int32_t AudioRecordJni::EnableBuiltInNS(bool enable) {
   return j_audio_record_->EnableBuiltInNS(enable) ? 0 : -1;
 }
 
-void JNICALL AudioRecordJni::CacheDirectBufferAddress(JNIEnv* env,
-                                                      jobject obj,
-                                                      jobject byte_buffer,
-                                                      jlong nativeAudioRecord) {
-  AudioRecordJni* this_object =
-      reinterpret_cast<AudioRecordJni*>(nativeAudioRecord);
-  this_object->OnCacheDirectBufferAddress(env, byte_buffer);
-}
-
-void AudioRecordJni::OnCacheDirectBufferAddress(JNIEnv* env,
-                                                jobject byte_buffer) {
+void AudioRecordJni::CacheDirectBufferAddress(
+    JNIEnv* env,
+    const JavaParamRef<jobject>& j_caller,
+    const JavaParamRef<jobject>& byte_buffer) {
   RTC_LOG(INFO) << "OnCacheDirectBufferAddress";
   RTC_DCHECK(thread_checker_.CalledOnValidThread());
   RTC_DCHECK(!direct_buffer_address_);
-  direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer);
-  jlong capacity = env->GetDirectBufferCapacity(byte_buffer);
+  direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
+  jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
   RTC_LOG(INFO) << "direct buffer capacity: " << capacity;
   direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
 }
 
-void JNICALL AudioRecordJni::DataIsRecorded(JNIEnv* env,
-                                            jobject obj,
-                                            jint length,
-                                            jlong nativeAudioRecord) {
-  AudioRecordJni* this_object =
-      reinterpret_cast<AudioRecordJni*>(nativeAudioRecord);
-  this_object->OnDataIsRecorded(length);
-}
-
 // This method is called on a high-priority thread from Java. The name of
 // the thread is 'AudioRecordThread'.
-void AudioRecordJni::OnDataIsRecorded(int length) {
+void AudioRecordJni::DataIsRecorded(JNIEnv* env,
+                                    const JavaParamRef<jobject>& j_caller,
+                                    int length) {
   RTC_DCHECK(thread_checker_java_.CalledOnValidThread());
   if (!audio_device_buffer_) {
     RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";

@@ -16,8 +16,6 @@
 
 #include "modules/audio_device/audio_device_buffer.h"
 #include "modules/audio_device/include/audio_device_defines.h"
-#include "modules/utility/include/helpers_android.h"
-#include "modules/utility/include/jvm_android.h"
 #include "rtc_base/thread_checker.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
 
@@ -48,8 +46,8 @@ class AudioRecordJni {
   // Wraps the Java specific parts of the AudioRecordJni into one helper class.
   class JavaAudioRecord {
    public:
-    JavaAudioRecord(NativeRegistration* native_registration,
-                    std::unique_ptr<GlobalRef> audio_track);
+    explicit JavaAudioRecord(const ScopedJavaLocalRef<jobject>& audio_record);
     ~JavaAudioRecord();
 
     int InitRecording(int sample_rate, size_t channels);
@@ -59,12 +57,9 @@ class AudioRecordJni {
     bool EnableBuiltInNS(bool enable);
 
    private:
-    std::unique_ptr<GlobalRef> audio_record_;
-    jmethodID init_recording_;
-    jmethodID start_recording_;
-    jmethodID stop_recording_;
-    jmethodID enable_built_in_aec_;
-    jmethodID enable_built_in_ns_;
+    JNIEnv* const env_;
+    rtc::ThreadChecker thread_checker_;
+    ScopedJavaGlobalRef<jobject> audio_record_;
   };
 
   explicit AudioRecordJni(AudioManager* audio_manager);
@@ -86,17 +81,14 @@ class AudioRecordJni {
   int32_t EnableBuiltInAGC(bool enable);
   int32_t EnableBuiltInNS(bool enable);
 
- private:
   // Called from Java side so we can cache the address of the Java-manged
   // |byte_buffer| in |direct_buffer_address_|. The size of the buffer
   // is also stored in |direct_buffer_capacity_in_bytes_|.
   // This method will be called by the WebRtcAudioRecord constructor, i.e.,
   // on the same thread that this object is created on.
-  static void JNICALL CacheDirectBufferAddress(JNIEnv* env,
-                                               jobject obj,
-                                               jobject byte_buffer,
-                                               jlong nativeAudioRecord);
-  void OnCacheDirectBufferAddress(JNIEnv* env, jobject byte_buffer);
+  void CacheDirectBufferAddress(JNIEnv* env,
+                                const JavaParamRef<jobject>& j_caller,
+                                const JavaParamRef<jobject>& byte_buffer);
 
   // Called periodically by the Java based WebRtcAudioRecord object when
   // recording has started. Each call indicates that there are |length| new
@@ -104,12 +96,11 @@ class AudioRecordJni {
   // now time to send these to the consumer.
   // This method is called on a high-priority thread from Java. The name of
   // the thread is 'AudioRecordThread'.
-  static void JNICALL DataIsRecorded(JNIEnv* env,
-                                     jobject obj,
-                                     jint length,
-                                     jlong nativeAudioRecord);
-  void OnDataIsRecorded(int length);
+  void DataIsRecorded(JNIEnv* env,
+                      const JavaParamRef<jobject>& j_caller,
+                      int length);
 
+ private:
   // Stores thread ID in constructor.
   rtc::ThreadChecker thread_checker_;
 
@@ -117,16 +108,6 @@ class AudioRecordJni {
   // thread in Java. Detached during construction of this object.
   rtc::ThreadChecker thread_checker_java_;
 
-  // Calls AttachCurrentThread() if this thread is not attached at construction.
-  // Also ensures that DetachCurrentThread() is called at destruction.
-  AttachCurrentThreadIfNeeded attach_thread_if_needed_;
-
-  // Wraps the JNI interface pointer and methods associated with it.
-  std::unique_ptr<JNIEnvironment> j_environment_;
-
-  // Contains factory method for creating the Java object.
-  std::unique_ptr<NativeRegistration> j_native_registration_;
-
   // Wraps the Java specific parts of the AudioRecordJni class.
   std::unique_ptr<AudioRecordJni::JavaAudioRecord> j_audio_record_;
 

@ -18,6 +18,8 @@
|
|||||||
#include "rtc_base/format_macros.h"
|
#include "rtc_base/format_macros.h"
|
||||||
#include "rtc_base/logging.h"
|
#include "rtc_base/logging.h"
|
||||||
#include "rtc_base/platform_thread.h"
|
#include "rtc_base/platform_thread.h"
|
||||||
|
#include "sdk/android/generated_audio_jni/jni/WebRtcAudioTrack_jni.h"
|
||||||
|
#include "sdk/android/src/jni/jni_helpers.h"
|
||||||
|
|
||||||
namespace webrtc {
|
namespace webrtc {
|
||||||
|
|
||||||
@ -25,47 +27,45 @@ namespace android_adm {
|
|||||||
|
|
||||||
// AudioTrackJni::JavaAudioTrack implementation.
|
// AudioTrackJni::JavaAudioTrack implementation.
|
||||||
AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
|
AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
|
||||||
NativeRegistration* native_reg,
|
const ScopedJavaLocalRef<jobject>& audio_track)
|
||||||
std::unique_ptr<GlobalRef> audio_track)
|
: env_(audio_track.env()), audio_track_(audio_track) {}
|
||||||
: audio_track_(std::move(audio_track)),
|
|
||||||
init_playout_(native_reg->GetMethodId("initPlayout", "(II)Z")),
|
|
||||||
start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
|
|
||||||
stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
|
|
||||||
set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
|
|
||||||
get_stream_max_volume_(
|
|
||||||
native_reg->GetMethodId("getStreamMaxVolume", "()I")),
|
|
||||||
get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")) {}
|
|
||||||
|
|
||||||
AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}
|
AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}
|
||||||
|
|
||||||
bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
|
bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
|
||||||
return audio_track_->CallBooleanMethod(init_playout_, sample_rate, channels);
|
thread_checker_.CalledOnValidThread();
|
||||||
|
return Java_WebRtcAudioTrack_initPlayout(env_, audio_track_, sample_rate,
|
||||||
|
channels);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
|
bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
|
||||||
return audio_track_->CallBooleanMethod(start_playout_);
|
thread_checker_.CalledOnValidThread();
|
||||||
|
return Java_WebRtcAudioTrack_startPlayout(env_, audio_track_);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool AudioTrackJni::JavaAudioTrack::StopPlayout() {
|
bool AudioTrackJni::JavaAudioTrack::StopPlayout() {
|
||||||
return audio_track_->CallBooleanMethod(stop_playout_);
|
thread_checker_.CalledOnValidThread();
|
||||||
|
return Java_WebRtcAudioTrack_stopPlayout(env_, audio_track_);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool AudioTrackJni::JavaAudioTrack::SetStreamVolume(int volume) {
|
bool AudioTrackJni::JavaAudioTrack::SetStreamVolume(int volume) {
|
||||||
return audio_track_->CallBooleanMethod(set_stream_volume_, volume);
|
thread_checker_.CalledOnValidThread();
|
||||||
|
return Java_WebRtcAudioTrack_setStreamVolume(env_, audio_track_, volume);
|
||||||
}
|
}
|
||||||
|
|
||||||
int AudioTrackJni::JavaAudioTrack::GetStreamMaxVolume() {
|
int AudioTrackJni::JavaAudioTrack::GetStreamMaxVolume() {
|
||||||
return audio_track_->CallIntMethod(get_stream_max_volume_);
|
thread_checker_.CalledOnValidThread();
|
||||||
|
return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, audio_track_);
|
||||||
}
|
}
|
||||||
|
|
||||||
int AudioTrackJni::JavaAudioTrack::GetStreamVolume() {
|
int AudioTrackJni::JavaAudioTrack::GetStreamVolume() {
|
||||||
return audio_track_->CallIntMethod(get_stream_volume_);
|
thread_checker_.CalledOnValidThread();
|
||||||
|
return Java_WebRtcAudioTrack_getStreamVolume(env_, audio_track_);
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO(henrika): possible extend usage of AudioManager and add it as member.
|
// TODO(henrika): possible extend usage of AudioManager and add it as member.
|
||||||
AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
|
AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
|
||||||
: j_environment_(JVM::GetInstance()->environment()),
|
: audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
|
||||||
audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
|
|
||||||
direct_buffer_address_(nullptr),
|
direct_buffer_address_(nullptr),
|
||||||
direct_buffer_capacity_in_bytes_(0),
|
direct_buffer_capacity_in_bytes_(0),
|
||||||
frames_per_buffer_(0),
|
frames_per_buffer_(0),
|
||||||
@ -74,19 +74,8 @@ AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
|
|||||||
audio_device_buffer_(nullptr) {
|
audio_device_buffer_(nullptr) {
|
||||||
RTC_LOG(INFO) << "ctor";
|
RTC_LOG(INFO) << "ctor";
|
||||||
RTC_DCHECK(audio_parameters_.is_valid());
|
RTC_DCHECK(audio_parameters_.is_valid());
|
||||||
RTC_CHECK(j_environment_);
|
j_audio_track_.reset(new JavaAudioTrack(Java_WebRtcAudioTrack_Constructor(
|
||||||
JNINativeMethod native_methods[] = {
|
AttachCurrentThreadIfNeeded(), jni::jlongFromPointer(this))));
|
||||||
{"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V",
|
|
||||||
reinterpret_cast<void*>(&AudioTrackJni::CacheDirectBufferAddress)},
|
|
||||||
{"nativeGetPlayoutData", "(IJ)V",
|
|
||||||
reinterpret_cast<void*>(&AudioTrackJni::GetPlayoutData)}};
|
|
||||||
j_native_registration_ = j_environment_->RegisterNatives(
|
|
||||||
"org/webrtc/voiceengine/WebRtcAudioTrack", native_methods,
|
|
||||||
arraysize(native_methods));
|
|
||||||
j_audio_track_.reset(
|
|
||||||
new JavaAudioTrack(j_native_registration_.get(),
|
|
||||||
j_native_registration_->NewObject(
|
|
||||||
"<init>", "(J)V", PointerTojlong(this))));
|
|
||||||
// Detach from this thread since we want to use the checker to verify calls
|
// Detach from this thread since we want to use the checker to verify calls
|
||||||
// from the Java based audio thread.
|
// from the Java based audio thread.
|
||||||
thread_checker_java_.DetachFromThread();
|
thread_checker_java_.DetachFromThread();
|
||||||
@@ -205,22 +194,15 @@ void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
   audio_device_buffer_->SetPlayoutChannels(channels);
 }
 
-void JNICALL AudioTrackJni::CacheDirectBufferAddress(JNIEnv* env,
-                                                     jobject obj,
-                                                     jobject byte_buffer,
-                                                     jlong nativeAudioTrack) {
-  AudioTrackJni* this_object =
-      reinterpret_cast<AudioTrackJni*>(nativeAudioTrack);
-  this_object->OnCacheDirectBufferAddress(env, byte_buffer);
-}
-
-void AudioTrackJni::OnCacheDirectBufferAddress(JNIEnv* env,
-                                               jobject byte_buffer) {
+void AudioTrackJni::CacheDirectBufferAddress(
+    JNIEnv* env,
+    const JavaParamRef<jobject>&,
+    const JavaParamRef<jobject>& byte_buffer) {
   RTC_LOG(INFO) << "OnCacheDirectBufferAddress";
   RTC_DCHECK(thread_checker_.CalledOnValidThread());
   RTC_DCHECK(!direct_buffer_address_);
-  direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer);
-  jlong capacity = env->GetDirectBufferCapacity(byte_buffer);
+  direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
+  jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
   RTC_LOG(INFO) << "direct buffer capacity: " << capacity;
   direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
   const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
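The playout buffer is a direct ByteBuffer allocated on the Java side; the native code only caches its address and capacity once. A minimal sketch of that caching step with raw JNI calls (no WebRTC wrappers; the struct and function names are hypothetical):

#include <jni.h>

#include <cstddef>

// Cache the memory region backing a Java direct ByteBuffer.
// GetDirectBufferAddress() and GetDirectBufferCapacity() are standard JNIEnv
// functions; they return nullptr / -1 for non-direct buffers, so a real
// implementation should check for that.
struct DirectBufferInfo {
  void* address = nullptr;
  size_t capacity_bytes = 0;
};

DirectBufferInfo CacheDirectBuffer(JNIEnv* env, jobject byte_buffer) {
  DirectBufferInfo info;
  info.address = env->GetDirectBufferAddress(byte_buffer);
  const jlong capacity = env->GetDirectBufferCapacity(byte_buffer);
  if (info.address != nullptr && capacity > 0) {
    info.capacity_bytes = static_cast<size_t>(capacity);
  }
  return info;
}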
@@ -228,18 +210,11 @@ void AudioTrackJni::OnCacheDirectBufferAddress(JNIEnv* env,
   RTC_LOG(INFO) << "frames_per_buffer: " << frames_per_buffer_;
 }
 
-void JNICALL AudioTrackJni::GetPlayoutData(JNIEnv* env,
-                                           jobject obj,
-                                           jint length,
-                                           jlong nativeAudioTrack) {
-  AudioTrackJni* this_object =
-      reinterpret_cast<AudioTrackJni*>(nativeAudioTrack);
-  this_object->OnGetPlayoutData(static_cast<size_t>(length));
-}
-
 // This method is called on a high-priority thread from Java. The name of
 // the thread is 'AudioRecordTrack'.
-void AudioTrackJni::OnGetPlayoutData(size_t length) {
+void AudioTrackJni::GetPlayoutData(JNIEnv* env,
+                                   const JavaParamRef<jobject>&,
+                                   size_t length) {
   RTC_DCHECK(thread_checker_java_.CalledOnValidThread());
   const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
   RTC_DCHECK_EQ(frames_per_buffer_, length / bytes_per_frame);
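Each playout callback delivers |length| bytes, and the DCHECK above ties that back to the cached frames_per_buffer_. A tiny sketch of the same arithmetic, assuming interleaved 16-bit PCM as implied by the channels * sizeof(int16_t) expression (helper names are illustrative):

#include <cassert>
#include <cstddef>
#include <cstdint>

// For interleaved 16-bit PCM, one frame holds one int16_t sample per channel.
size_t BytesPerFrame(size_t channels) {
  return channels * sizeof(int16_t);
}

// The buffer the Java side fills must hold a whole number of frames, matching
// the frames_per_buffer value derived from the cached buffer capacity.
size_t FramesInBuffer(size_t length_bytes, size_t channels) {
  const size_t bytes_per_frame = BytesPerFrame(channels);
  assert(length_bytes % bytes_per_frame == 0);
  return length_bytes / bytes_per_frame;
}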
@@ -16,8 +16,6 @@
 
 #include "modules/audio_device/audio_device_buffer.h"
 #include "modules/audio_device/include/audio_device_defines.h"
-#include "modules/utility/include/helpers_android.h"
-#include "modules/utility/include/jvm_android.h"
 #include "rtc_base/thread_checker.h"
 #include "sdk/android/src/jni/audio_device/audio_common.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
@@ -44,8 +42,7 @@ class AudioTrackJni {
   // Wraps the Java specific parts of the AudioTrackJni into one helper class.
   class JavaAudioTrack {
    public:
-    JavaAudioTrack(NativeRegistration* native_registration,
-                   std::unique_ptr<GlobalRef> audio_track);
+    explicit JavaAudioTrack(const ScopedJavaLocalRef<jobject>& audio_track);
     ~JavaAudioTrack();
 
     bool InitPlayout(int sample_rate, int channels);
@@ -56,13 +53,9 @@ class AudioTrackJni {
     int GetStreamVolume();
 
    private:
-    std::unique_ptr<GlobalRef> audio_track_;
-    jmethodID init_playout_;
-    jmethodID start_playout_;
-    jmethodID stop_playout_;
-    jmethodID set_stream_volume_;
-    jmethodID get_stream_max_volume_;
-    jmethodID get_stream_volume_;
+    JNIEnv* const env_;
+    rtc::ThreadChecker thread_checker_;
+    ScopedJavaGlobalRef<jobject> audio_track_;
   };
 
   explicit AudioTrackJni(AudioManager* audio_manager);
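The rewritten helper keeps the Java WebRtcAudioTrack alive through a scoped global-reference wrapper instead of a GlobalRef plus a bag of cached jmethodIDs. A global reference is what pins a Java object across JNI calls; a minimal RAII sketch of that idea with raw JNI follows (illustrative only, not WebRTC's ScopedJavaGlobalRef, and simplified to assume construction and destruction on the same attached thread):

#include <jni.h>

// Owns a JNI global reference: NewGlobalRef() pins the Java object so it stays
// valid beyond the current native call, DeleteGlobalRef() releases it again.
class GlobalObjectRef {
 public:
  GlobalObjectRef(JNIEnv* env, jobject local_ref)
      : env_(env), ref_(env->NewGlobalRef(local_ref)) {}
  // Simplification: reusing the cached JNIEnv* is only valid if the destructor
  // runs on the same attached thread that created the reference.
  ~GlobalObjectRef() { env_->DeleteGlobalRef(ref_); }

  GlobalObjectRef(const GlobalObjectRef&) = delete;
  GlobalObjectRef& operator=(const GlobalObjectRef&) = delete;

  jobject obj() const { return ref_; }

 private:
  JNIEnv* const env_;
  jobject ref_;
};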
@@ -86,28 +79,23 @@ class AudioTrackJni {
 
   void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
 
- private:
   // Called from Java side so we can cache the address of the Java-manged
   // |byte_buffer| in |direct_buffer_address_|. The size of the buffer
   // is also stored in |direct_buffer_capacity_in_bytes_|.
   // Called on the same thread as the creating thread.
-  static void JNICALL CacheDirectBufferAddress(JNIEnv* env,
-                                               jobject obj,
-                                               jobject byte_buffer,
-                                               jlong nativeAudioTrack);
-  void OnCacheDirectBufferAddress(JNIEnv* env, jobject byte_buffer);
+  void CacheDirectBufferAddress(JNIEnv* env,
+                                const JavaParamRef<jobject>& j_caller,
+                                const JavaParamRef<jobject>& byte_buffer);
 
   // Called periodically by the Java based WebRtcAudioTrack object when
   // playout has started. Each call indicates that |length| new bytes should
   // be written to the memory area |direct_buffer_address_| for playout.
   // This method is called on a high-priority thread from Java. The name of
   // the thread is 'AudioTrackThread'.
-  static void JNICALL GetPlayoutData(JNIEnv* env,
-                                     jobject obj,
-                                     jint length,
-                                     jlong nativeAudioTrack);
-  void OnGetPlayoutData(size_t length);
+  void GetPlayoutData(JNIEnv* env,
+                      const JavaParamRef<jobject>& j_caller,
+                      size_t length);
 
+ private:
   // Stores thread ID in constructor.
   rtc::ThreadChecker thread_checker_;
@@ -115,16 +103,6 @@ class AudioTrackJni {
   // thread in Java. Detached during construction of this object.
   rtc::ThreadChecker thread_checker_java_;
 
-  // Calls AttachCurrentThread() if this thread is not attached at construction.
-  // Also ensures that DetachCurrentThread() is called at destruction.
-  AttachCurrentThreadIfNeeded attach_thread_if_needed_;
-
-  // Wraps the JNI interface pointer and methods associated with it.
-  std::unique_ptr<JNIEnvironment> j_environment_;
-
-  // Contains factory method for creating the Java object.
-  std::unique_ptr<NativeRegistration> j_native_registration_;
-
   // Wraps the Java specific parts of the AudioTrackJni class.
   std::unique_ptr<AudioTrackJni::JavaAudioTrack> j_audio_track_;
 
@@ -10,52 +10,48 @@
 
 #include "sdk/android/src/jni/audio_device/build_info.h"
 
-#include "modules/utility/include/helpers_android.h"
+#include "sdk/android/generated_audio_jni/jni/BuildInfo_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
 
 namespace webrtc {
 
 namespace android_adm {
 
-BuildInfo::BuildInfo()
-    : j_environment_(JVM::GetInstance()->environment()),
-      j_build_info_(
-          JVM::GetInstance()->GetClass("org/webrtc/voiceengine/BuildInfo")) {}
-
-std::string BuildInfo::GetStringFromJava(const char* name) {
-  jmethodID id = j_build_info_.GetStaticMethodId(name, "()Ljava/lang/String;");
-  jstring j_string =
-      static_cast<jstring>(j_build_info_.CallStaticObjectMethod(id));
-  return j_environment_->JavaToStdString(j_string);
-}
+BuildInfo::BuildInfo() : env_(AttachCurrentThreadIfNeeded()) {}
 
 std::string BuildInfo::GetDeviceModel() {
-  return GetStringFromJava("getDeviceModel");
+  thread_checker_.CalledOnValidThread();
+  return JavaToStdString(env_, Java_BuildInfo_getDeviceModel(env_));
 }
 
 std::string BuildInfo::GetBrand() {
-  return GetStringFromJava("getBrand");
+  thread_checker_.CalledOnValidThread();
+  return JavaToStdString(env_, Java_BuildInfo_getBrand(env_));
 }
 
 std::string BuildInfo::GetDeviceManufacturer() {
-  return GetStringFromJava("getDeviceManufacturer");
+  thread_checker_.CalledOnValidThread();
+  return JavaToStdString(env_, Java_BuildInfo_getDeviceManufacturer(env_));
 }
 
 std::string BuildInfo::GetAndroidBuildId() {
-  return GetStringFromJava("getAndroidBuildId");
+  thread_checker_.CalledOnValidThread();
+  return JavaToStdString(env_, Java_BuildInfo_getAndroidBuildId(env_));
 }
 
 std::string BuildInfo::GetBuildType() {
-  return GetStringFromJava("getBuildType");
+  thread_checker_.CalledOnValidThread();
+  return JavaToStdString(env_, Java_BuildInfo_getBuildType(env_));
 }
 
 std::string BuildInfo::GetBuildRelease() {
-  return GetStringFromJava("getBuildRelease");
+  thread_checker_.CalledOnValidThread();
+  return JavaToStdString(env_, Java_BuildInfo_getBuildRelease(env_));
 }
 
 SdkCode BuildInfo::GetSdkVersion() {
-  jmethodID id = j_build_info_.GetStaticMethodId("getSdkVersion", "()I");
-  jint j_version = j_build_info_.CallStaticIntMethod(id);
-  return static_cast<SdkCode>(j_version);
+  thread_checker_.CalledOnValidThread();
+  return static_cast<SdkCode>(Java_BuildInfo_getSdkVersion(env_));
 }
 
 }  // namespace android_adm
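The deleted GetStringFromJava() helper wrapped the reflective JNI pattern of looking up a static method ID and invoking it; the generated Java_BuildInfo_* stubs now perform that lookup per getter. A minimal sketch of the manual pattern in raw JNI, for comparison (the class and method names follow the removed code, the helper name is hypothetical, and error handling is omitted):

#include <jni.h>

#include <string>

// Call a static, no-argument Java method that returns a String and convert the
// result to a std::string. This is roughly what the removed GetStringFromJava()
// helper did by hand before the jni_generator-produced stubs took over.
std::string CallStaticStringGetter(JNIEnv* env,
                                   const char* class_name,
                                   const char* method_name) {
  jclass clazz = env->FindClass(class_name);
  jmethodID id =
      env->GetStaticMethodID(clazz, method_name, "()Ljava/lang/String;");
  jstring j_str =
      static_cast<jstring>(env->CallStaticObjectMethod(clazz, id));
  const char* utf_chars = env->GetStringUTFChars(j_str, nullptr);
  std::string result(utf_chars);
  env->ReleaseStringUTFChars(j_str, utf_chars);
  return result;
}

// Hypothetical call site:
//   std::string model = CallStaticStringGetter(
//       env, "org/webrtc/voiceengine/BuildInfo", "getDeviceModel");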
@@ -15,7 +15,7 @@
 #include <memory>
 #include <string>
 
-#include "modules/utility/include/jvm_android.h"
+#include "rtc_base/thread_checker.h"
 
 namespace webrtc {
 
@@ -65,21 +65,8 @@ class BuildInfo {
   SdkCode GetSdkVersion();
 
  private:
-  // Helper method which calls a static getter method with |name| and returns
-  // a string from Java.
-  std::string GetStringFromJava(const char* name);
-
-  // Ensures that this class can access a valid JNI interface pointer even
-  // if the creating thread was not attached to the JVM.
-  AttachCurrentThreadIfNeeded attach_thread_if_needed_;
-
-  // Provides access to the JNIEnv interface pointer and the JavaToStdString()
-  // method which is used to translate Java strings to std strings.
-  std::unique_ptr<JNIEnvironment> j_environment_;
-
-  // Holds the jclass object and provides access to CallStaticObjectMethod().
-  // Used by GetStringFromJava() during construction only.
-  JavaClass j_build_info_;
+  JNIEnv* const env_;
+  rtc::ThreadChecker thread_checker_;
 };
 
 }  // namespace android_adm
@@ -19,7 +19,6 @@
 #include "modules/audio_device/audio_device_buffer.h"
 #include "modules/audio_device/fine_audio_buffer.h"
 #include "modules/audio_device/include/audio_device_defines.h"
-#include "modules/utility/include/helpers_android.h"
 #include "rtc_base/thread_checker.h"
 #include "sdk/android/src/jni/audio_device/audio_common.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
@@ -20,7 +20,6 @@
 #include "modules/audio_device/audio_device_buffer.h"
 #include "modules/audio_device/fine_audio_buffer.h"
 #include "modules/audio_device/include/audio_device_defines.h"
-#include "modules/utility/include/helpers_android.h"
 #include "rtc_base/thread_checker.h"
 #include "sdk/android/src/jni/audio_device/audio_common.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"