Android: Generate audio JNI code

This CL only affects the forked Android audio device code. The old code
at webrtc/modules/audio_device/android/ is unaffected.

Bug: webrtc:8689, webrtc:8278
Change-Id: I696b8297baba9a0f657ea3df808f57ebf259cb06
Reviewed-on: https://webrtc-review.googlesource.com/36502
Reviewed-by: Paulina Hensman <phensman@webrtc.org>
Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22528}
This commit is contained in:
Magnus Jedvert
2018-03-20 19:11:19 +01:00
committed by Commit Bot
parent 37e36027e2
commit 8fc7948cc2
17 changed files with 221 additions and 342 deletions

View File

@ -195,6 +195,8 @@ rtc_source_set("audio_device_jni") {
}
deps = [
":base_jni",
":generated_audio_jni",
":native_api_jni",
"../../api:array_view",
"../../modules/audio_device:audio_device",
"../../modules/audio_device:audio_device_buffer",
@ -221,6 +223,17 @@ rtc_static_library("null_audio_jni") {
]
}
generate_jni("generated_audio_jni") {
sources = [
"src/java/org/webrtc/audio/BuildInfo.java",
"src/java/org/webrtc/audio/WebRtcAudioManager.java",
"src/java/org/webrtc/audio/WebRtcAudioRecord.java",
"src/java/org/webrtc/audio/WebRtcAudioTrack.java",
]
jni_package = ""
jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
}
generate_jni("generated_video_jni") {
sources = [
"api/org/webrtc/EncodedImage.java",
@ -809,6 +822,7 @@ rtc_android_library("libjingle_peerconnection_java") {
"src/java/org/webrtc/WrappedNativeI420Buffer.java",
"src/java/org/webrtc/WrappedNativeVideoEncoder.java",
"src/java/org/webrtc/WrappedNativeVideoDecoder.java",
"src/java/org/webrtc/audio/BuildInfo.java",
"src/java/org/webrtc/audio/WebRtcAudioEffects.java",
"src/java/org/webrtc/audio/WebRtcAudioManager.java",
"src/java/org/webrtc/audio/WebRtcAudioRecord.java",

View File

@ -21,7 +21,7 @@ import java.lang.annotation.Target;
*/
@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
@Retention(RetentionPolicy.CLASS)
@interface CalledByNative {
public @interface CalledByNative {
/*
* If present, tells which inner class the method belongs to.
*/

View File

@ -11,12 +11,14 @@
package org.webrtc.audio;
import android.os.Build;
import org.webrtc.CalledByNative;
public final class BuildInfo {
public static String getDevice() {
return Build.DEVICE;
}
@CalledByNative
public static String getDeviceModel() {
return Build.MODEL;
}
@ -25,26 +27,32 @@ public final class BuildInfo {
return Build.PRODUCT;
}
@CalledByNative
public static String getBrand() {
return Build.BRAND;
}
@CalledByNative
public static String getDeviceManufacturer() {
return Build.MANUFACTURER;
}
@CalledByNative
public static String getAndroidBuildId() {
return Build.ID;
}
@CalledByNative
public static String getBuildType() {
return Build.TYPE;
}
@CalledByNative
public static String getBuildRelease() {
return Build.VERSION.RELEASE;
}
@CalledByNative
public static int getSdkVersion() {
return Build.VERSION.SDK_INT;
}

View File

@ -22,6 +22,8 @@ import java.util.Timer;
import java.util.TimerTask;
import org.webrtc.ContextUtils;
import org.webrtc.Logging;
import org.webrtc.CalledByNative;
import org.webrtc.NativeClassQualifiedName;
// WebRtcAudioManager handles tasks that use android.media.AudioManager.
// At construction, storeAudioParameters() is called and it retrieves
@ -170,6 +172,7 @@ class WebRtcAudioManager {
private final VolumeLogger volumeLogger;
@CalledByNative
WebRtcAudioManager(long nativeAudioManager) {
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
this.nativeAudioManager = nativeAudioManager;
@ -180,12 +183,13 @@ class WebRtcAudioManager {
}
volumeLogger = new VolumeLogger(audioManager);
storeAudioParameters();
nativeCacheAudioParameters(sampleRate, outputChannels, inputChannels, hardwareAEC, hardwareAGC,
hardwareNS, lowLatencyOutput, lowLatencyInput, proAudio, aAudio, outputBufferSize,
inputBufferSize, nativeAudioManager);
nativeCacheAudioParameters(nativeAudioManager, sampleRate, outputChannels, inputChannels,
hardwareAEC, hardwareAGC, hardwareNS, lowLatencyOutput, lowLatencyInput, proAudio, aAudio,
outputBufferSize, inputBufferSize);
WebRtcAudioUtils.logAudioState(TAG);
}
@CalledByNative
private boolean init() {
Logging.d(TAG, "init" + WebRtcAudioUtils.getThreadInfo());
if (initialized) {
@ -197,6 +201,7 @@ class WebRtcAudioManager {
return true;
}
@CalledByNative
private void dispose() {
Logging.d(TAG, "dispose" + WebRtcAudioUtils.getThreadInfo());
if (!initialized) {
@ -205,10 +210,12 @@ class WebRtcAudioManager {
volumeLogger.stop();
}
@CalledByNative
private boolean isCommunicationModeEnabled() {
return (audioManager.getMode() == AudioManager.MODE_IN_COMMUNICATION);
}
@CalledByNative
private boolean isDeviceBlacklistedForOpenSLESUsage() {
boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
? blacklistDeviceForOpenSLESUsage
@ -376,8 +383,9 @@ class WebRtcAudioManager {
}
}
private native void nativeCacheAudioParameters(int sampleRate, int outputChannels,
int inputChannels, boolean hardwareAEC, boolean hardwareAGC, boolean hardwareNS,
boolean lowLatencyOutput, boolean lowLatencyInput, boolean proAudio, boolean aAudio,
int outputBufferSize, int inputBufferSize, long nativeAudioManager);
@NativeClassQualifiedName("webrtc::android_adm::AudioManager")
private native void nativeCacheAudioParameters(long nativeAudioManager, int sampleRate,
int outputChannels, int inputChannels, boolean hardwareAEC, boolean hardwareAGC,
boolean hardwareNS, boolean lowLatencyOutput, boolean lowLatencyInput, boolean proAudio,
boolean aAudio, int outputBufferSize, int inputBufferSize);
}

View File

@ -23,6 +23,8 @@ import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import org.webrtc.audio.AudioDeviceModule.AudioRecordErrorCallback;
import org.webrtc.audio.AudioDeviceModule.AudioRecordStartErrorCode;
import org.webrtc.CalledByNative;
import org.webrtc.NativeClassQualifiedName;
class WebRtcAudioRecord {
private static final boolean DEBUG = false;
@ -150,7 +152,7 @@ class WebRtcAudioRecord {
// failed to join this thread. To be a bit safer, try to avoid calling any native methods
// in case they've been unregistered after stopRecording() returned.
if (keepAlive) {
nativeDataIsRecorded(bytesRead, nativeAudioRecord);
nativeDataIsRecorded(nativeAudioRecord, bytesRead);
}
if (audioSamplesReadyCallback != null) {
// Copy the entire byte buffer array. Assume that the start of the byteBuffer is
@ -192,6 +194,7 @@ class WebRtcAudioRecord {
}
}
@CalledByNative
WebRtcAudioRecord(long nativeAudioRecord) {
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
this.nativeAudioRecord = nativeAudioRecord;
@ -201,6 +204,7 @@ class WebRtcAudioRecord {
effects = WebRtcAudioEffects.create();
}
@CalledByNative
private boolean enableBuiltInAEC(boolean enable) {
Logging.d(TAG, "enableBuiltInAEC(" + enable + ')');
if (effects == null) {
@ -210,6 +214,7 @@ class WebRtcAudioRecord {
return effects.setAEC(enable);
}
@CalledByNative
private boolean enableBuiltInNS(boolean enable) {
Logging.d(TAG, "enableBuiltInNS(" + enable + ')');
if (effects == null) {
@ -219,6 +224,7 @@ class WebRtcAudioRecord {
return effects.setNS(enable);
}
@CalledByNative
private int initRecording(int sampleRate, int channels) {
Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
if (audioRecord != null) {
@ -233,7 +239,7 @@ class WebRtcAudioRecord {
// Rather than passing the ByteBuffer with every callback (requiring
// the potentially expensive GetDirectBufferAddress) we simply have the
// native class cache the address to the memory once.
nativeCacheDirectBufferAddress(byteBuffer, nativeAudioRecord);
nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer);
// Get the minimum buffer size required for the successful creation of
// an AudioRecord object, in byte units.
@ -273,6 +279,7 @@ class WebRtcAudioRecord {
return framesPerBuffer;
}
@CalledByNative
private boolean startRecording() {
Logging.d(TAG, "startRecording");
assertTrue(audioRecord != null);
@ -295,6 +302,7 @@ class WebRtcAudioRecord {
return true;
}
@CalledByNative
private boolean stopRecording() {
Logging.d(TAG, "stopRecording");
assertTrue(audioThread != null);
@ -340,9 +348,11 @@ class WebRtcAudioRecord {
return (channels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
}
private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
@NativeClassQualifiedName("webrtc::android_adm::AudioRecordJni")
private native void nativeCacheDirectBufferAddress(long nativeAudioRecord, ByteBuffer byteBuffer);
private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord);
@NativeClassQualifiedName("webrtc::android_adm::AudioRecordJni")
private native void nativeDataIsRecorded(long nativeAudioRecord, int bytes);
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized void setAudioSource(int source) {

View File

@ -25,6 +25,8 @@ import org.webrtc.Logging;
import org.webrtc.ThreadUtils;
import org.webrtc.audio.AudioDeviceModule.AudioTrackErrorCallback;
import org.webrtc.audio.AudioDeviceModule.AudioTrackStartErrorCode;
import org.webrtc.CalledByNative;
import org.webrtc.NativeClassQualifiedName;
class WebRtcAudioTrack {
private static final boolean DEBUG = false;
@ -122,7 +124,7 @@ class WebRtcAudioTrack {
// Get 10ms of PCM data from the native WebRTC client. Audio data is
// written into the common ByteBuffer using the address that was
// cached at construction.
nativeGetPlayoutData(sizeInBytes, nativeAudioTrack);
nativeGetPlayoutData(nativeAudioTrack, sizeInBytes);
// Write data until all data has been written to the audio sink.
// Upon return, the buffer position will have been advanced to reflect
// the amount of data that was successfully written to the AudioTrack.
@ -188,6 +190,7 @@ class WebRtcAudioTrack {
}
}
@CalledByNative
WebRtcAudioTrack(long nativeAudioTrack) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
@ -199,6 +202,7 @@ class WebRtcAudioTrack {
}
}
@CalledByNative
private boolean initPlayout(int sampleRate, int channels) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
@ -209,7 +213,7 @@ class WebRtcAudioTrack {
// Rather than passing the ByteBuffer with every callback (requiring
// the potentially expensive GetDirectBufferAddress) we simply have the
// native class cache the address to the memory once.
nativeCacheDirectBufferAddress(byteBuffer, nativeAudioTrack);
nativeCacheDirectBufferAddress(nativeAudioTrack, byteBuffer);
// Get the minimum buffer size required for the successful creation of an
// AudioTrack object to be created in the MODE_STREAM mode.
@ -271,6 +275,7 @@ class WebRtcAudioTrack {
return true;
}
@CalledByNative
private boolean startPlayout() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "startPlayout");
@ -301,6 +306,7 @@ class WebRtcAudioTrack {
return true;
}
@CalledByNative
private boolean stopPlayout() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "stopPlayout");
@ -321,6 +327,7 @@ class WebRtcAudioTrack {
}
// Get max possible volume index for a phone call audio stream.
@CalledByNative
private int getStreamMaxVolume() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "getStreamMaxVolume");
@ -329,6 +336,7 @@ class WebRtcAudioTrack {
}
// Set current volume level for a phone call audio stream.
@CalledByNative
private boolean setStreamVolume(int volume) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "setStreamVolume(" + volume + ")");
@ -351,6 +359,7 @@ class WebRtcAudioTrack {
}
/** Get current volume level for a phone call audio stream. */
@CalledByNative
private int getStreamVolume() {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "getStreamVolume");
@ -447,9 +456,12 @@ class WebRtcAudioTrack {
return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
}
private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
@NativeClassQualifiedName("webrtc::android_adm::AudioTrackJni")
private static native void nativeCacheDirectBufferAddress(
long nativeAudioRecord, ByteBuffer byteBuffer);
private native void nativeGetPlayoutData(int bytes, long nativeAudioRecord);
@NativeClassQualifiedName("webrtc::android_adm::AudioTrackJni")
private static native void nativeGetPlayoutData(long nativeAudioRecord, int bytes);
// Sets all samples to be played out to zero if |mute| is true, i.e.,
// ensures that the speaker is muted.

View File

@ -1,6 +1,4 @@
include_rules = [
"+base/android/jni_android.h",
"+modules/audio_device",
# TODO(bugs.webrtc.org/8689): Remove this dependency and use jni generation instead.
"+modules/utility/include/helpers_android.h",
]

View File

@ -12,12 +12,13 @@
#include <utility>
#include "modules/utility/include/helpers_android.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "sdk/android/generated_audio_jni/jni/WebRtcAudioManager_jni.h"
#include "sdk/android/src/jni/audio_device/audio_common.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
@ -25,16 +26,8 @@ namespace android_adm {
// AudioManager::JavaAudioManager implementation
AudioManager::JavaAudioManager::JavaAudioManager(
NativeRegistration* native_reg,
std::unique_ptr<GlobalRef> audio_manager)
: audio_manager_(std::move(audio_manager)),
init_(native_reg->GetMethodId("init", "()Z")),
dispose_(native_reg->GetMethodId("dispose", "()V")),
is_communication_mode_enabled_(
native_reg->GetMethodId("isCommunicationModeEnabled", "()Z")),
is_device_blacklisted_for_open_sles_usage_(
native_reg->GetMethodId("isDeviceBlacklistedForOpenSLESUsage",
"()Z")) {
const ScopedJavaLocalRef<jobject>& audio_manager)
: env_(audio_manager.env()), audio_manager_(audio_manager) {
RTC_LOG(INFO) << "JavaAudioManager::ctor";
}
@ -43,26 +36,30 @@ AudioManager::JavaAudioManager::~JavaAudioManager() {
}
bool AudioManager::JavaAudioManager::Init() {
return audio_manager_->CallBooleanMethod(init_);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioManager_init(env_, audio_manager_);
}
void AudioManager::JavaAudioManager::Close() {
audio_manager_->CallVoidMethod(dispose_);
thread_checker_.CalledOnValidThread();
Java_WebRtcAudioManager_dispose(env_, audio_manager_);
}
bool AudioManager::JavaAudioManager::IsCommunicationModeEnabled() {
return audio_manager_->CallBooleanMethod(is_communication_mode_enabled_);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioManager_isCommunicationModeEnabled(env_,
audio_manager_);
}
bool AudioManager::JavaAudioManager::IsDeviceBlacklistedForOpenSLESUsage() {
return audio_manager_->CallBooleanMethod(
is_device_blacklisted_for_open_sles_usage_);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioManager_isDeviceBlacklistedForOpenSLESUsage(
env_, audio_manager_);
}
// AudioManager implementation
AudioManager::AudioManager()
: j_environment_(JVM::GetInstance()->environment()),
audio_layer_(AudioDeviceModule::kPlatformDefaultAudio),
: audio_layer_(AudioDeviceModule::kPlatformDefaultAudio),
initialized_(false),
hardware_aec_(false),
hardware_agc_(false),
@ -71,17 +68,9 @@ AudioManager::AudioManager()
low_latency_record_(false),
delay_estimate_in_milliseconds_(0) {
RTC_LOG(INFO) << "ctor";
RTC_CHECK(j_environment_);
JNINativeMethod native_methods[] = {
{"nativeCacheAudioParameters", "(IIIZZZZZZZIIJ)V",
reinterpret_cast<void*>(&AudioManager::CacheAudioParameters)}};
j_native_registration_ = j_environment_->RegisterNatives(
"org/webrtc/voiceengine/WebRtcAudioManager", native_methods,
arraysize(native_methods));
j_audio_manager_.reset(
new JavaAudioManager(j_native_registration_.get(),
j_native_registration_->NewObject(
"<init>", "(J)V", PointerTojlong(this))));
new JavaAudioManager(Java_WebRtcAudioManager_Constructor(
AttachCurrentThreadIfNeeded(), jni::jlongFromPointer(this))));
}
AudioManager::~AudioManager() {
@ -238,30 +227,8 @@ int AudioManager::GetDelayEstimateInMilliseconds() const {
return delay_estimate_in_milliseconds_;
}
void JNICALL AudioManager::CacheAudioParameters(JNIEnv* env,
jobject obj,
jint sample_rate,
jint output_channels,
jint input_channels,
jboolean hardware_aec,
jboolean hardware_agc,
jboolean hardware_ns,
jboolean low_latency_output,
jboolean low_latency_input,
jboolean pro_audio,
jboolean a_audio,
jint output_buffer_size,
jint input_buffer_size,
jlong native_audio_manager) {
AudioManager* this_object =
reinterpret_cast<AudioManager*>(native_audio_manager);
this_object->OnCacheAudioParameters(
env, sample_rate, output_channels, input_channels, hardware_aec,
hardware_agc, hardware_ns, low_latency_output, low_latency_input,
pro_audio, a_audio, output_buffer_size, input_buffer_size);
}
void AudioManager::OnCacheAudioParameters(JNIEnv* env,
void AudioManager::CacheAudioParameters(JNIEnv* env,
const JavaParamRef<jobject>& j_caller,
jint sample_rate,
jint output_channels,
jint input_channels,

View File

@ -17,9 +17,8 @@
#include "modules/audio_device/include/audio_device.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "modules/utility/include/helpers_android.h"
#include "modules/utility/include/jvm_android.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h"
#include "sdk/android/src/jni/audio_device/audio_common.h"
#include "sdk/android/src/jni/audio_device/opensles_common.h"
@ -41,8 +40,7 @@ class AudioManager {
// parts that are associated with this call.
class JavaAudioManager {
public:
JavaAudioManager(NativeRegistration* native_registration,
std::unique_ptr<GlobalRef> audio_manager);
explicit JavaAudioManager(const ScopedJavaLocalRef<jobject>& audio_manager);
~JavaAudioManager();
bool Init();
@ -51,11 +49,9 @@ class AudioManager {
bool IsDeviceBlacklistedForOpenSLESUsage();
private:
std::unique_ptr<GlobalRef> audio_manager_;
jmethodID init_;
jmethodID dispose_;
jmethodID is_communication_mode_enabled_;
jmethodID is_device_blacklisted_for_open_sles_usage_;
JNIEnv* const env_;
rtc::ThreadChecker thread_checker_;
ScopedJavaGlobalRef<jobject> audio_manager_;
};
AudioManager();
@ -124,26 +120,11 @@ class AudioManager {
// webrtc::kHighLatencyModeDelayEstimateInMilliseconds.
int GetDelayEstimateInMilliseconds() const;
private:
// Called from Java side so we can cache the native audio parameters.
// This method will be called by the WebRtcAudioManager constructor, i.e.
// on the same thread that this object is created on.
static void JNICALL CacheAudioParameters(JNIEnv* env,
jobject obj,
jint sample_rate,
jint output_channels,
jint input_channels,
jboolean hardware_aec,
jboolean hardware_agc,
jboolean hardware_ns,
jboolean low_latency_output,
jboolean low_latency_input,
jboolean pro_audio,
jboolean a_audio,
jint output_buffer_size,
jint input_buffer_size,
jlong native_audio_manager);
void OnCacheAudioParameters(JNIEnv* env,
void CacheAudioParameters(JNIEnv* env,
const JavaParamRef<jobject>& j_caller,
jint sample_rate,
jint output_channels,
jint input_channels,
@ -157,21 +138,12 @@ class AudioManager {
jint output_buffer_size,
jint input_buffer_size);
private:
// Stores thread ID in the constructor.
// We can then use ThreadChecker::CalledOnValidThread() to ensure that
// other methods are called from the same thread.
rtc::ThreadChecker thread_checker_;
// Calls AttachCurrentThread() if this thread is not attached at construction.
// Also ensures that DetachCurrentThread() is called at destruction.
AttachCurrentThreadIfNeeded attach_thread_if_needed_;
// Wraps the JNI interface pointer and methods associated with it.
std::unique_ptr<JNIEnvironment> j_environment_;
// Contains factory method for creating the Java object.
std::unique_ptr<NativeRegistration> j_native_registration_;
// Wraps the Java specific parts of the AudioManager.
std::unique_ptr<AudioManager::JavaAudioManager> j_audio_manager_;

View File

@ -19,7 +19,9 @@
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/timeutils.h"
#include "sdk/android/generated_audio_jni/jni/WebRtcAudioRecord_jni.h"
#include "sdk/android/src/jni/audio_device/audio_common.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "system_wrappers/include/metrics.h"
namespace webrtc {
@ -47,46 +49,44 @@ class ScopedHistogramTimer {
// AudioRecordJni::JavaAudioRecord implementation.
AudioRecordJni::JavaAudioRecord::JavaAudioRecord(
NativeRegistration* native_reg,
std::unique_ptr<GlobalRef> audio_record)
: audio_record_(std::move(audio_record)),
init_recording_(native_reg->GetMethodId("initRecording", "(II)I")),
start_recording_(native_reg->GetMethodId("startRecording", "()Z")),
stop_recording_(native_reg->GetMethodId("stopRecording", "()Z")),
enable_built_in_aec_(native_reg->GetMethodId("enableBuiltInAEC", "(Z)Z")),
enable_built_in_ns_(native_reg->GetMethodId("enableBuiltInNS", "(Z)Z")) {}
const ScopedJavaLocalRef<jobject>& audio_record)
: env_(audio_record.env()), audio_record_(audio_record) {}
AudioRecordJni::JavaAudioRecord::~JavaAudioRecord() {}
int AudioRecordJni::JavaAudioRecord::InitRecording(int sample_rate,
size_t channels) {
return audio_record_->CallIntMethod(init_recording_,
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioRecord_initRecording(env_, audio_record_,
static_cast<jint>(sample_rate),
static_cast<jint>(channels));
}
bool AudioRecordJni::JavaAudioRecord::StartRecording() {
return audio_record_->CallBooleanMethod(start_recording_);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioRecord_startRecording(env_, audio_record_);
}
bool AudioRecordJni::JavaAudioRecord::StopRecording() {
return audio_record_->CallBooleanMethod(stop_recording_);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioRecord_stopRecording(env_, audio_record_);
}
bool AudioRecordJni::JavaAudioRecord::EnableBuiltInAEC(bool enable) {
return audio_record_->CallBooleanMethod(enable_built_in_aec_,
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioRecord_enableBuiltInAEC(env_, audio_record_,
static_cast<jboolean>(enable));
}
bool AudioRecordJni::JavaAudioRecord::EnableBuiltInNS(bool enable) {
return audio_record_->CallBooleanMethod(enable_built_in_ns_,
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioRecord_enableBuiltInNS(env_, audio_record_,
static_cast<jboolean>(enable));
}
// AudioRecordJni implementation.
AudioRecordJni::AudioRecordJni(AudioManager* audio_manager)
: j_environment_(JVM::GetInstance()->environment()),
audio_manager_(audio_manager),
: audio_manager_(audio_manager),
audio_parameters_(audio_manager->GetRecordAudioParameters()),
total_delay_in_milliseconds_(0),
direct_buffer_address_(nullptr),
@ -97,19 +97,8 @@ AudioRecordJni::AudioRecordJni(AudioManager* audio_manager)
audio_device_buffer_(nullptr) {
RTC_LOG(INFO) << "ctor";
RTC_DCHECK(audio_parameters_.is_valid());
RTC_CHECK(j_environment_);
JNINativeMethod native_methods[] = {
{"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V",
reinterpret_cast<void*>(&AudioRecordJni::CacheDirectBufferAddress)},
{"nativeDataIsRecorded", "(IJ)V",
reinterpret_cast<void*>(&AudioRecordJni::DataIsRecorded)}};
j_native_registration_ = j_environment_->RegisterNatives(
"org/webrtc/voiceengine/WebRtcAudioRecord", native_methods,
arraysize(native_methods));
j_audio_record_.reset(
new JavaAudioRecord(j_native_registration_.get(),
j_native_registration_->NewObject(
"<init>", "(J)V", PointerTojlong(this))));
j_audio_record_.reset(new JavaAudioRecord(Java_WebRtcAudioRecord_Constructor(
AttachCurrentThreadIfNeeded(), jni::jlongFromPointer(this))));
// Detach from this thread since we want to use the checker to verify calls
// from the Java based audio thread.
thread_checker_java_.DetachFromThread();
@ -230,38 +219,24 @@ int32_t AudioRecordJni::EnableBuiltInNS(bool enable) {
return j_audio_record_->EnableBuiltInNS(enable) ? 0 : -1;
}
void JNICALL AudioRecordJni::CacheDirectBufferAddress(JNIEnv* env,
jobject obj,
jobject byte_buffer,
jlong nativeAudioRecord) {
AudioRecordJni* this_object =
reinterpret_cast<AudioRecordJni*>(nativeAudioRecord);
this_object->OnCacheDirectBufferAddress(env, byte_buffer);
}
void AudioRecordJni::OnCacheDirectBufferAddress(JNIEnv* env,
jobject byte_buffer) {
void AudioRecordJni::CacheDirectBufferAddress(
JNIEnv* env,
const JavaParamRef<jobject>& j_caller,
const JavaParamRef<jobject>& byte_buffer) {
RTC_LOG(INFO) << "OnCacheDirectBufferAddress";
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(!direct_buffer_address_);
direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer);
jlong capacity = env->GetDirectBufferCapacity(byte_buffer);
direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
RTC_LOG(INFO) << "direct buffer capacity: " << capacity;
direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
}
void JNICALL AudioRecordJni::DataIsRecorded(JNIEnv* env,
jobject obj,
jint length,
jlong nativeAudioRecord) {
AudioRecordJni* this_object =
reinterpret_cast<AudioRecordJni*>(nativeAudioRecord);
this_object->OnDataIsRecorded(length);
}
// This method is called on a high-priority thread from Java. The name of
// the thread is 'AudioRecordThread'.
void AudioRecordJni::OnDataIsRecorded(int length) {
void AudioRecordJni::DataIsRecorded(JNIEnv* env,
const JavaParamRef<jobject>& j_caller,
int length) {
RTC_DCHECK(thread_checker_java_.CalledOnValidThread());
if (!audio_device_buffer_) {
RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";

View File

@ -16,8 +16,6 @@
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "modules/utility/include/helpers_android.h"
#include "modules/utility/include/jvm_android.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/src/jni/audio_device/audio_manager.h"
@ -48,8 +46,8 @@ class AudioRecordJni {
// Wraps the Java specific parts of the AudioRecordJni into one helper class.
class JavaAudioRecord {
public:
JavaAudioRecord(NativeRegistration* native_registration,
std::unique_ptr<GlobalRef> audio_track);
explicit JavaAudioRecord(const ScopedJavaLocalRef<jobject>& audio_record);
~JavaAudioRecord();
int InitRecording(int sample_rate, size_t channels);
@ -59,12 +57,9 @@ class AudioRecordJni {
bool EnableBuiltInNS(bool enable);
private:
std::unique_ptr<GlobalRef> audio_record_;
jmethodID init_recording_;
jmethodID start_recording_;
jmethodID stop_recording_;
jmethodID enable_built_in_aec_;
jmethodID enable_built_in_ns_;
JNIEnv* const env_;
rtc::ThreadChecker thread_checker_;
ScopedJavaGlobalRef<jobject> audio_record_;
};
explicit AudioRecordJni(AudioManager* audio_manager);
@ -86,17 +81,14 @@ class AudioRecordJni {
int32_t EnableBuiltInAGC(bool enable);
int32_t EnableBuiltInNS(bool enable);
private:
// Called from Java side so we can cache the address of the Java-managed
// |byte_buffer| in |direct_buffer_address_|. The size of the buffer
// is also stored in |direct_buffer_capacity_in_bytes_|.
// This method will be called by the WebRtcAudioRecord constructor, i.e.,
// on the same thread that this object is created on.
static void JNICALL CacheDirectBufferAddress(JNIEnv* env,
jobject obj,
jobject byte_buffer,
jlong nativeAudioRecord);
void OnCacheDirectBufferAddress(JNIEnv* env, jobject byte_buffer);
void CacheDirectBufferAddress(JNIEnv* env,
const JavaParamRef<jobject>& j_caller,
const JavaParamRef<jobject>& byte_buffer);
// Called periodically by the Java based WebRtcAudioRecord object when
// recording has started. Each call indicates that there are |length| new
@ -104,12 +96,11 @@ class AudioRecordJni {
// now time to send these to the consumer.
// This method is called on a high-priority thread from Java. The name of
// the thread is 'AudioRecordThread'.
static void JNICALL DataIsRecorded(JNIEnv* env,
jobject obj,
jint length,
jlong nativeAudioRecord);
void OnDataIsRecorded(int length);
void DataIsRecorded(JNIEnv* env,
const JavaParamRef<jobject>& j_caller,
int length);
private:
// Stores thread ID in constructor.
rtc::ThreadChecker thread_checker_;
@ -117,16 +108,6 @@ class AudioRecordJni {
// thread in Java. Detached during construction of this object.
rtc::ThreadChecker thread_checker_java_;
// Calls AttachCurrentThread() if this thread is not attached at construction.
// Also ensures that DetachCurrentThread() is called at destruction.
AttachCurrentThreadIfNeeded attach_thread_if_needed_;
// Wraps the JNI interface pointer and methods associated with it.
std::unique_ptr<JNIEnvironment> j_environment_;
// Contains factory method for creating the Java object.
std::unique_ptr<NativeRegistration> j_native_registration_;
// Wraps the Java specific parts of the AudioRecordJni class.
std::unique_ptr<AudioRecordJni::JavaAudioRecord> j_audio_record_;

View File

@ -18,6 +18,8 @@
#include "rtc_base/format_macros.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "sdk/android/generated_audio_jni/jni/WebRtcAudioTrack_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
@ -25,47 +27,45 @@ namespace android_adm {
// AudioTrackJni::JavaAudioTrack implementation.
AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
NativeRegistration* native_reg,
std::unique_ptr<GlobalRef> audio_track)
: audio_track_(std::move(audio_track)),
init_playout_(native_reg->GetMethodId("initPlayout", "(II)Z")),
start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
get_stream_max_volume_(
native_reg->GetMethodId("getStreamMaxVolume", "()I")),
get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")) {}
const ScopedJavaLocalRef<jobject>& audio_track)
: env_(audio_track.env()), audio_track_(audio_track) {}
AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}
bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
return audio_track_->CallBooleanMethod(init_playout_, sample_rate, channels);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioTrack_initPlayout(env_, audio_track_, sample_rate,
channels);
}
bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
return audio_track_->CallBooleanMethod(start_playout_);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioTrack_startPlayout(env_, audio_track_);
}
bool AudioTrackJni::JavaAudioTrack::StopPlayout() {
return audio_track_->CallBooleanMethod(stop_playout_);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioTrack_stopPlayout(env_, audio_track_);
}
bool AudioTrackJni::JavaAudioTrack::SetStreamVolume(int volume) {
return audio_track_->CallBooleanMethod(set_stream_volume_, volume);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioTrack_setStreamVolume(env_, audio_track_, volume);
}
int AudioTrackJni::JavaAudioTrack::GetStreamMaxVolume() {
return audio_track_->CallIntMethod(get_stream_max_volume_);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, audio_track_);
}
int AudioTrackJni::JavaAudioTrack::GetStreamVolume() {
return audio_track_->CallIntMethod(get_stream_volume_);
thread_checker_.CalledOnValidThread();
return Java_WebRtcAudioTrack_getStreamVolume(env_, audio_track_);
}
// TODO(henrika): possibly extend usage of AudioManager and add it as member.
AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
: j_environment_(JVM::GetInstance()->environment()),
audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
: audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
direct_buffer_address_(nullptr),
direct_buffer_capacity_in_bytes_(0),
frames_per_buffer_(0),
@ -74,19 +74,8 @@ AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
audio_device_buffer_(nullptr) {
RTC_LOG(INFO) << "ctor";
RTC_DCHECK(audio_parameters_.is_valid());
RTC_CHECK(j_environment_);
JNINativeMethod native_methods[] = {
{"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V",
reinterpret_cast<void*>(&AudioTrackJni::CacheDirectBufferAddress)},
{"nativeGetPlayoutData", "(IJ)V",
reinterpret_cast<void*>(&AudioTrackJni::GetPlayoutData)}};
j_native_registration_ = j_environment_->RegisterNatives(
"org/webrtc/voiceengine/WebRtcAudioTrack", native_methods,
arraysize(native_methods));
j_audio_track_.reset(
new JavaAudioTrack(j_native_registration_.get(),
j_native_registration_->NewObject(
"<init>", "(J)V", PointerTojlong(this))));
j_audio_track_.reset(new JavaAudioTrack(Java_WebRtcAudioTrack_Constructor(
AttachCurrentThreadIfNeeded(), jni::jlongFromPointer(this))));
// Detach from this thread since we want to use the checker to verify calls
// from the Java based audio thread.
thread_checker_java_.DetachFromThread();
@ -205,22 +194,15 @@ void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
audio_device_buffer_->SetPlayoutChannels(channels);
}
void JNICALL AudioTrackJni::CacheDirectBufferAddress(JNIEnv* env,
jobject obj,
jobject byte_buffer,
jlong nativeAudioTrack) {
AudioTrackJni* this_object =
reinterpret_cast<AudioTrackJni*>(nativeAudioTrack);
this_object->OnCacheDirectBufferAddress(env, byte_buffer);
}
void AudioTrackJni::OnCacheDirectBufferAddress(JNIEnv* env,
jobject byte_buffer) {
void AudioTrackJni::CacheDirectBufferAddress(
JNIEnv* env,
const JavaParamRef<jobject>&,
const JavaParamRef<jobject>& byte_buffer) {
RTC_LOG(INFO) << "OnCacheDirectBufferAddress";
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(!direct_buffer_address_);
direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer);
jlong capacity = env->GetDirectBufferCapacity(byte_buffer);
direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
RTC_LOG(INFO) << "direct buffer capacity: " << capacity;
direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
@ -228,18 +210,11 @@ void AudioTrackJni::OnCacheDirectBufferAddress(JNIEnv* env,
RTC_LOG(INFO) << "frames_per_buffer: " << frames_per_buffer_;
}
void JNICALL AudioTrackJni::GetPlayoutData(JNIEnv* env,
jobject obj,
jint length,
jlong nativeAudioTrack) {
AudioTrackJni* this_object =
reinterpret_cast<AudioTrackJni*>(nativeAudioTrack);
this_object->OnGetPlayoutData(static_cast<size_t>(length));
}
// This method is called on a high-priority thread from Java. The name of
// the thread is 'AudioRecordTrack'.
void AudioTrackJni::OnGetPlayoutData(size_t length) {
void AudioTrackJni::GetPlayoutData(JNIEnv* env,
const JavaParamRef<jobject>&,
size_t length) {
RTC_DCHECK(thread_checker_java_.CalledOnValidThread());
const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
RTC_DCHECK_EQ(frames_per_buffer_, length / bytes_per_frame);

View File

@ -16,8 +16,6 @@
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "modules/utility/include/helpers_android.h"
#include "modules/utility/include/jvm_android.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/src/jni/audio_device/audio_common.h"
#include "sdk/android/src/jni/audio_device/audio_manager.h"
@ -44,8 +42,7 @@ class AudioTrackJni {
// Wraps the Java specific parts of the AudioTrackJni into one helper class.
class JavaAudioTrack {
public:
JavaAudioTrack(NativeRegistration* native_registration,
std::unique_ptr<GlobalRef> audio_track);
explicit JavaAudioTrack(const ScopedJavaLocalRef<jobject>& audio_track);
~JavaAudioTrack();
bool InitPlayout(int sample_rate, int channels);
@ -56,13 +53,9 @@ class AudioTrackJni {
int GetStreamVolume();
private:
std::unique_ptr<GlobalRef> audio_track_;
jmethodID init_playout_;
jmethodID start_playout_;
jmethodID stop_playout_;
jmethodID set_stream_volume_;
jmethodID get_stream_max_volume_;
jmethodID get_stream_volume_;
JNIEnv* const env_;
rtc::ThreadChecker thread_checker_;
ScopedJavaGlobalRef<jobject> audio_track_;
};
explicit AudioTrackJni(AudioManager* audio_manager);
@ -86,28 +79,23 @@ class AudioTrackJni {
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
private:
// Called from Java side so we can cache the address of the Java-manged
// |byte_buffer| in |direct_buffer_address_|. The size of the buffer
// is also stored in |direct_buffer_capacity_in_bytes_|.
// Called on the same thread as the creating thread.
static void JNICALL CacheDirectBufferAddress(JNIEnv* env,
jobject obj,
jobject byte_buffer,
jlong nativeAudioTrack);
void OnCacheDirectBufferAddress(JNIEnv* env, jobject byte_buffer);
void CacheDirectBufferAddress(JNIEnv* env,
const JavaParamRef<jobject>& j_caller,
const JavaParamRef<jobject>& byte_buffer);
// Called periodically by the Java based WebRtcAudioTrack object when
// playout has started. Each call indicates that |length| new bytes should
// be written to the memory area |direct_buffer_address_| for playout.
// This method is called on a high-priority thread from Java. The name of
// the thread is 'AudioTrackThread'.
static void JNICALL GetPlayoutData(JNIEnv* env,
jobject obj,
jint length,
jlong nativeAudioTrack);
void OnGetPlayoutData(size_t length);
void GetPlayoutData(JNIEnv* env,
const JavaParamRef<jobject>& j_caller,
size_t length);
private:
// Stores thread ID in constructor.
rtc::ThreadChecker thread_checker_;
@ -115,16 +103,6 @@ class AudioTrackJni {
// thread in Java. Detached during construction of this object.
rtc::ThreadChecker thread_checker_java_;
// Calls AttachCurrentThread() if this thread is not attached at construction.
// Also ensures that DetachCurrentThread() is called at destruction.
AttachCurrentThreadIfNeeded attach_thread_if_needed_;
// Wraps the JNI interface pointer and methods associated with it.
std::unique_ptr<JNIEnvironment> j_environment_;
// Contains factory method for creating the Java object.
std::unique_ptr<NativeRegistration> j_native_registration_;
// Wraps the Java specific parts of the AudioTrackJni class.
std::unique_ptr<AudioTrackJni::JavaAudioTrack> j_audio_track_;

View File

@ -10,52 +10,48 @@
#include "sdk/android/src/jni/audio_device/build_info.h"
#include "modules/utility/include/helpers_android.h"
#include "sdk/android/generated_audio_jni/jni/BuildInfo_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace android_adm {
BuildInfo::BuildInfo()
: j_environment_(JVM::GetInstance()->environment()),
j_build_info_(
JVM::GetInstance()->GetClass("org/webrtc/voiceengine/BuildInfo")) {}
std::string BuildInfo::GetStringFromJava(const char* name) {
jmethodID id = j_build_info_.GetStaticMethodId(name, "()Ljava/lang/String;");
jstring j_string =
static_cast<jstring>(j_build_info_.CallStaticObjectMethod(id));
return j_environment_->JavaToStdString(j_string);
}
BuildInfo::BuildInfo() : env_(AttachCurrentThreadIfNeeded()) {}
std::string BuildInfo::GetDeviceModel() {
return GetStringFromJava("getDeviceModel");
thread_checker_.CalledOnValidThread();
return JavaToStdString(env_, Java_BuildInfo_getDeviceModel(env_));
}
std::string BuildInfo::GetBrand() {
return GetStringFromJava("getBrand");
thread_checker_.CalledOnValidThread();
return JavaToStdString(env_, Java_BuildInfo_getBrand(env_));
}
std::string BuildInfo::GetDeviceManufacturer() {
return GetStringFromJava("getDeviceManufacturer");
thread_checker_.CalledOnValidThread();
return JavaToStdString(env_, Java_BuildInfo_getDeviceManufacturer(env_));
}
std::string BuildInfo::GetAndroidBuildId() {
return GetStringFromJava("getAndroidBuildId");
thread_checker_.CalledOnValidThread();
return JavaToStdString(env_, Java_BuildInfo_getAndroidBuildId(env_));
}
std::string BuildInfo::GetBuildType() {
return GetStringFromJava("getBuildType");
thread_checker_.CalledOnValidThread();
return JavaToStdString(env_, Java_BuildInfo_getBuildType(env_));
}
std::string BuildInfo::GetBuildRelease() {
return GetStringFromJava("getBuildRelease");
thread_checker_.CalledOnValidThread();
return JavaToStdString(env_, Java_BuildInfo_getBuildRelease(env_));
}
SdkCode BuildInfo::GetSdkVersion() {
jmethodID id = j_build_info_.GetStaticMethodId("getSdkVersion", "()I");
jint j_version = j_build_info_.CallStaticIntMethod(id);
return static_cast<SdkCode>(j_version);
thread_checker_.CalledOnValidThread();
return static_cast<SdkCode>(Java_BuildInfo_getSdkVersion(env_));
}
} // namespace android_adm

View File

@ -15,7 +15,7 @@
#include <memory>
#include <string>
#include "modules/utility/include/jvm_android.h"
#include "rtc_base/thread_checker.h"
namespace webrtc {
@ -65,21 +65,8 @@ class BuildInfo {
SdkCode GetSdkVersion();
private:
// Helper method which calls a static getter method with |name| and returns
// a string from Java.
std::string GetStringFromJava(const char* name);
// Ensures that this class can access a valid JNI interface pointer even
// if the creating thread was not attached to the JVM.
AttachCurrentThreadIfNeeded attach_thread_if_needed_;
// Provides access to the JNIEnv interface pointer and the JavaToStdString()
// method which is used to translate Java strings to std strings.
std::unique_ptr<JNIEnvironment> j_environment_;
// Holds the jclass object and provides access to CallStaticObjectMethod().
// Used by GetStringFromJava() during construction only.
JavaClass j_build_info_;
JNIEnv* const env_;
rtc::ThreadChecker thread_checker_;
};
} // namespace android_adm

View File

@ -19,7 +19,6 @@
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/fine_audio_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "modules/utility/include/helpers_android.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/src/jni/audio_device/audio_common.h"
#include "sdk/android/src/jni/audio_device/audio_manager.h"

View File

@ -20,7 +20,6 @@
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/fine_audio_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "modules/utility/include/helpers_android.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/src/jni/audio_device/audio_common.h"
#include "sdk/android/src/jni/audio_device/audio_manager.h"