Android: Simplify moved audio device module
This CL performs some simplifications and cleanups of the moved audio code:

* All JNI interaction now goes from the C++ audio manager calling into the Java audio manager. The calls back from the Java code into the C++ audio manager (used for caching audio parameters) are removed. This is simpler because the Java code is now unaware of the C++ layer, which will make it easier to turn it into a Java interface.
* A bunch of state related to caching the audio parameters was removed.
* Some unused functions were removed from the audio manager.
* The Java audio manager no longer depends on ContextUtils; the context has to be passed in externally instead. This is done because we want to get rid of ContextUtils eventually.
* The selection of which AudioDeviceModule to create (AAudio, OpenSL ES input/output) is now exposed in the interface. Clients should decide and create what they need explicitly instead of setting blacklists in static global WebRTC classes. This will be more modular in the long term.
* Selecting which audio device module to create (OpenSL ES combinations) no longer requires instantiating a C++ AudioManager; it is done with static enumeration methods instead.

Bug: webrtc:7452
Change-Id: Iba29cf7447a1f6063abd9544d7315e10095167c8
Reviewed-on: https://webrtc-review.googlesource.com/63760
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Paulina Hensman <phensman@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22542}
commit 32362a6729 (parent 2955d82eca), committed by Commit Bot
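Context for the diff below: after this change the application selects the audio layer explicitly through the factory functions declared in sdk/android/native_api/audio_device_module/audio_device_android.h. A minimal caller-side sketch (the wrapper CreateAdmForMyApp and the chosen flag values are illustrative, not part of this CL):

#include <jni.h>

#include "modules/audio_device/include/audio_device.h"
#include "sdk/android/native_api/audio_device_module/audio_device_android.h"

// Illustrative wrapper (not part of this CL): the application decides which
// audio layer it wants and creates the AudioDeviceModule explicitly.
rtc::scoped_refptr<webrtc::AudioDeviceModule> CreateAdmForMyApp(
    JNIEnv* env, jobject application_context) {
  // Java-based AudioRecord input plus OpenSL ES output: the combination the
  // removed automatic selection picked when only low-latency playout was
  // supported (it keeps HW AEC available on the input side).
  return webrtc::CreateAudioDeviceModule(env, application_context,
                                         /*use_opensles_input=*/false,
                                         /*use_opensles_output=*/true);
}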
sdk/android/native_api/audio_device_module/audio_device_android.cc
@@ -15,55 +15,27 @@
 #include "rtc_base/refcount.h"
 #include "rtc_base/refcountedobject.h"
-#include "system_wrappers/include/metrics.h"
-
-#if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
-#include "sdk/android/src/jni/audio_device/aaudio_player.h"
-#include "sdk/android/src/jni/audio_device/aaudio_recorder.h"
-#endif
-#include "sdk/android/src/jni/audio_device/audio_device_template_android.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
-#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
-#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
-#include "sdk/android/src/jni/audio_device/opensles_player.h"
-#include "sdk/android/src/jni/audio_device/opensles_recorder.h"
 
 namespace webrtc {
 
-rtc::scoped_refptr<AudioDeviceModule> CreateAndroidAudioDeviceModule() {
-  RTC_LOG(INFO) << __FUNCTION__;
-  // Create an Android audio manager.
-  android_adm::AudioManager audio_manager_android;
-  // Select best possible combination of audio layers.
-  if (audio_manager_android.IsAAudioSupported()) {
 #if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
-    return new rtc::RefCountedObject<android_adm::AudioDeviceTemplateAndroid<
-        android_adm::AAudioRecorder, android_adm::AAudioPlayer>>(
-        AudioDeviceModule::kAndroidAAudioAudio);
+rtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
+    JNIEnv* env,
+    jobject application_context) {
+  return android_adm::AudioManager::CreateAAudioAudioDeviceModule(
+      env, JavaParamRef<jobject>(application_context));
+}
 #endif
-  } else if (audio_manager_android.IsLowLatencyPlayoutSupported() &&
-             audio_manager_android.IsLowLatencyRecordSupported()) {
-    // Use OpenSL ES for both playout and recording.
-    return new rtc::RefCountedObject<android_adm::AudioDeviceTemplateAndroid<
-        android_adm::OpenSLESRecorder, android_adm::OpenSLESPlayer>>(
-        AudioDeviceModule::kAndroidOpenSLESAudio);
-  } else if (audio_manager_android.IsLowLatencyPlayoutSupported() &&
-             !audio_manager_android.IsLowLatencyRecordSupported()) {
-    // Use OpenSL ES for output on devices that only supports the
-    // low-latency output audio path.
-    // This combination provides low-latency output audio and at the same
-    // time support for HW AEC using the AudioRecord Java API.
-    return new rtc::RefCountedObject<android_adm::AudioDeviceTemplateAndroid<
-        android_adm::AudioRecordJni, android_adm::OpenSLESPlayer>>(
-        AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio);
-  } else {
-    // Use Java-based audio in both directions when low-latency output is
-    // not supported.
-    return new rtc::RefCountedObject<android_adm::AudioDeviceTemplateAndroid<
-        android_adm::AudioRecordJni, android_adm::AudioTrackJni>>(
-        AudioDeviceModule::kAndroidJavaAudio);
-  }
-  RTC_LOG(LS_ERROR) << "The requested audio layer is not supported";
-  return nullptr;
+
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
+    JNIEnv* env,
+    jobject application_context,
+    bool use_opensles_input,
+    bool use_opensles_output) {
+  return android_adm::AudioManager::CreateAudioDeviceModule(
+      env, JavaParamRef<jobject>(application_context), use_opensles_input,
+      use_opensles_output);
 }
 
 }  // namespace webrtc
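The removed CreateAndroidAudioDeviceModule() above chose the audio layer automatically from the device's low-latency capabilities. A sketch of how a caller could reproduce that mapping with the new explicit factory; the two support booleans are assumed to be determined by the application itself and are not a WebRTC API:

#include <jni.h>

#include "modules/audio_device/include/audio_device.h"
#include "sdk/android/native_api/audio_device_module/audio_device_android.h"

// Illustrative helper: maps the removed selection logic onto the new flags.
// The two "supported" booleans must be supplied by the application.
rtc::scoped_refptr<webrtc::AudioDeviceModule> CreateAdmLikeOldSelection(
    JNIEnv* env,
    jobject application_context,
    bool low_latency_playout_supported,
    bool low_latency_record_supported) {
  // Old logic: OpenSL ES output whenever low-latency playout is available;
  // OpenSL ES input only when low-latency recording is also available,
  // otherwise keep the Java AudioRecord path (which allows HW AEC).
  const bool use_opensles_output = low_latency_playout_supported;
  const bool use_opensles_input =
      low_latency_playout_supported && low_latency_record_supported;
  return webrtc::CreateAudioDeviceModule(env, application_context,
                                         use_opensles_input,
                                         use_opensles_output);
}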
sdk/android/native_api/audio_device_module/audio_device_android.h
@@ -11,11 +11,23 @@
 #ifndef SDK_ANDROID_NATIVE_API_AUDIO_DEVICE_MODULE_AUDIO_DEVICE_ANDROID_H_
 #define SDK_ANDROID_NATIVE_API_AUDIO_DEVICE_MODULE_AUDIO_DEVICE_ANDROID_H_
 
+#include <jni.h>
+
 #include "modules/audio_device/include/audio_device.h"
 
 namespace webrtc {
 
-rtc::scoped_refptr<AudioDeviceModule> CreateAndroidAudioDeviceModule();
+#if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
+rtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
+    JNIEnv* env,
+    jobject application_context);
+#endif
+
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
+    JNIEnv* env,
+    jobject application_context,
+    bool use_opensles_input,
+    bool use_opensles_output);
 
 }  // namespace webrtc
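A sketch of how the two declarations above might be combined by an application, preferring AAudio when the build defines AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO and otherwise falling back to the Java-based audio path (the wrapper function is illustrative, not part of this CL):

#include <jni.h>

#include "modules/audio_device/include/audio_device.h"
#include "sdk/android/native_api/audio_device_module/audio_device_android.h"

// Illustrative: prefer AAudio when the build enables it, otherwise fall back
// to the Java-based ADM (both flags false selects Java input and output).
rtc::scoped_refptr<webrtc::AudioDeviceModule> CreateAdmPreferAAudio(
    JNIEnv* env, jobject application_context) {
#if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
  return webrtc::CreateAAudioAudioDeviceModule(env, application_context);
#else
  return webrtc::CreateAudioDeviceModule(env, application_context,
                                         /*use_opensles_input=*/false,
                                         /*use_opensles_output=*/false);
#endif
}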