Revert "Reland "Delete old Android ADM.""

This reverts commit 6e4d7e606c4327eaa9298193e22794fcb9b30218.

Reason for revert: Still breaks downstream build (though in a different way this time)

Original change's description:
> Reland "Delete old Android ADM."
>
> This is a reland of commit 4ec3e9c98873520b3171d40ab0426b2f05edbbd2
>
> Original change's description:
> > Delete old Android ADM.
> >
> > The plan to move the Android ADM code to the sdk directory has been
> > around for several years, but the old code has still not been deleted.
> >
> > Bug: webrtc:7452
> > Change-Id: I0f75c680f71f0b2ce614de6cbd9f124c2a59d453
> > Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/264620
> > Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
> > Commit-Queue: Henrik Andreassson <henrika@webrtc.org>
> > Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
> > Cr-Commit-Position: refs/heads/main@{#37174}
>
> Bug: webrtc:7452
> Change-Id: Icabad23e72c8258a854b7809a93811161517266c
> Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/265872
> Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
> Commit-Queue: Björn Terelius <terelius@webrtc.org>
> Cr-Commit-Position: refs/heads/main@{#37236}

Bug: webrtc:7452
Change-Id: Ide8fbd55fadd7aed9989053afff7c63c04f1320f
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/266023
Bot-Commit: rubber-stamper@appspot.gserviceaccount.com <rubber-stamper@appspot.gserviceaccount.com>
Commit-Queue: Björn Terelius <terelius@webrtc.org>
Owners-Override: Björn Terelius <terelius@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#37242}
Author: Björn Terelius
Date: 2022-06-16 16:06:42 +00:00
Committed by: WebRTC LUCI CQ
Parent: 7517fb639b
Commit: 38a28603fd
53 changed files with 8366 additions and 216 deletions
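For orientation, the code restored below brings back the old ADM path in modules/audio_device, so AudioDeviceModule::Create() on Android once again goes through AudioDeviceModuleImpl::CreatePlatformSpecificObjects() instead of the sdk-based CreateAndroidAudioDeviceModule(). A minimal usage sketch of that generic factory follows; the helper name is illustrative and not part of this change, and JVM/application-context initialization for the old Android ADM is assumed to happen elsewhere.

#include "api/task_queue/task_queue_factory.h"
#include "modules/audio_device/include/audio_device.h"

// Hypothetical helper, for illustration only: obtain an ADM via the generic
// factory that this revert makes usable on Android again.
rtc::scoped_refptr<webrtc::AudioDeviceModule> CreateDefaultAdm(
    webrtc::TaskQueueFactory* task_queue_factory) {
  // kPlatformDefaultAudio lets CreatePlatformSpecificObjects() (last hunk
  // below) pick AAudio, OpenSL ES or Java audio based on device support.
  return webrtc::AudioDeviceModule::Create(
      webrtc::AudioDeviceModule::kPlatformDefaultAudio, task_queue_factory);
}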

modules/audio_device/audio_device_impl.cc

@@ -26,7 +26,16 @@
#endif
#elif defined(WEBRTC_ANDROID)
#include <stdlib.h>
#include "sdk/android/native_api/audio_device_module/audio_device_android.h"
#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
#include "modules/audio_device/android/aaudio_player.h"
#include "modules/audio_device/android/aaudio_recorder.h"
#endif
#include "modules/audio_device/android/audio_device_template.h"
#include "modules/audio_device/android/audio_manager.h"
#include "modules/audio_device/android/audio_record_jni.h"
#include "modules/audio_device/android/audio_track_jni.h"
#include "modules/audio_device/android/opensles_player.h"
#include "modules/audio_device/android/opensles_recorder.h"
#elif defined(WEBRTC_LINUX)
#if defined(WEBRTC_ENABLE_LINUX_ALSA)
#include "modules/audio_device/linux/audio_device_alsa_linux.h"
@@ -65,11 +74,7 @@ rtc::scoped_refptr<AudioDeviceModule> AudioDeviceModule::Create(
    AudioLayer audio_layer,
    TaskQueueFactory* task_queue_factory) {
  RTC_DLOG(LS_INFO) << __FUNCTION__;
#if defined(WEBRTC_ANDROID)
  return CreateAndroidAudioDeviceModule(audio_layer);
#else
  return AudioDeviceModule::CreateForTest(audio_layer, task_queue_factory);
#endif
}
// static
@@ -84,14 +89,6 @@ rtc::scoped_refptr<AudioDeviceModuleForTest> AudioDeviceModule::CreateForTest(
    RTC_LOG(LS_ERROR) << "Use the CreateWindowsCoreAudioAudioDeviceModule() "
                         "factory method instead for this option.";
    return nullptr;
  } else if (audio_layer == AudioDeviceModule::kAndroidJavaAudio ||
             audio_layer == AudioDeviceModule::kAndroidOpenSLESAudio ||
             audio_layer == AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio ||
             audio_layer == kAndroidAAudioAudio ||
             audio_layer == kAndroidJavaInputAndAAudioOutputAudio) {
    RTC_LOG(LS_ERROR) << "Use the CreateAndroidAudioDeviceModule() "
                         "factory method instead for this option.";
    return nullptr;
  }
  // Create the generic reference counted (platform independent) implementation.
@@ -185,13 +182,70 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() {
}
#endif // defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
#if defined(WEBRTC_ANDROID)
  // Create an Android audio manager.
  audio_manager_android_.reset(new AudioManager());
  // Select best possible combination of audio layers.
  if (audio_layer == kPlatformDefaultAudio) {
    if (audio_manager_android_->IsAAudioSupported()) {
      // Use of AAudio for both playout and recording has highest priority.
      audio_layer = kAndroidAAudioAudio;
    } else if (audio_manager_android_->IsLowLatencyPlayoutSupported() &&
               audio_manager_android_->IsLowLatencyRecordSupported()) {
      // Use OpenSL ES for both playout and recording.
      audio_layer = kAndroidOpenSLESAudio;
    } else if (audio_manager_android_->IsLowLatencyPlayoutSupported() &&
               !audio_manager_android_->IsLowLatencyRecordSupported()) {
      // Use OpenSL ES for output on devices that only supports the
      // low-latency output audio path.
      audio_layer = kAndroidJavaInputAndOpenSLESOutputAudio;
    } else {
      // Use Java-based audio in both directions when low-latency output is
      // not supported.
      audio_layer = kAndroidJavaAudio;
    }
  }
  AudioManager* audio_manager = audio_manager_android_.get();
  if (audio_layer == kAndroidJavaAudio) {
    // Java audio for both input and output audio.
    audio_device_.reset(new AudioDeviceTemplate<AudioRecordJni, AudioTrackJni>(
        audio_layer, audio_manager));
  } else if (audio_layer == kAndroidOpenSLESAudio) {
    // OpenSL ES based audio for both input and output audio.
    audio_device_.reset(
        new AudioDeviceTemplate<OpenSLESRecorder, OpenSLESPlayer>(
            audio_layer, audio_manager));
  } else if (audio_layer == kAndroidJavaInputAndOpenSLESOutputAudio) {
    // Java audio for input and OpenSL ES for output audio (i.e. mixed APIs).
    // This combination provides low-latency output audio and at the same
    // time support for HW AEC using the AudioRecord Java API.
    audio_device_.reset(new AudioDeviceTemplate<AudioRecordJni, OpenSLESPlayer>(
        audio_layer, audio_manager));
  } else if (audio_layer == kAndroidAAudioAudio) {
#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
    // AAudio based audio for both input and output.
    audio_device_.reset(new AudioDeviceTemplate<AAudioRecorder, AAudioPlayer>(
        audio_layer, audio_manager));
#endif
  } else if (audio_layer == kAndroidJavaInputAndAAudioOutputAudio) {
#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
    // Java audio for input and AAudio for output audio (i.e. mixed APIs).
    audio_device_.reset(new AudioDeviceTemplate<AudioRecordJni, AAudioPlayer>(
        audio_layer, audio_manager));
#endif
  } else {
    RTC_LOG(LS_ERROR) << "The requested audio layer is not supported";
    audio_device_.reset(nullptr);
  }
// END #if defined(WEBRTC_ANDROID)
// Linux ADM implementation.
// Note that, WEBRTC_ENABLE_LINUX_ALSA is always defined by default when
// WEBRTC_LINUX is defined. WEBRTC_ENABLE_LINUX_PULSE depends on the
// 'rtc_include_pulse_audio' build flag.
// TODO(bugs.webrtc.org/9127): improve support and make it more clear that
// PulseAudio is the default selection.
#if !defined(WEBRTC_ANDROID) && defined(WEBRTC_LINUX)
#elif defined(WEBRTC_LINUX)
#if !defined(WEBRTC_ENABLE_LINUX_PULSE)
// Build flag 'rtc_include_pulse_audio' is set to false. In this mode:
// - kPlatformDefaultAudio => ALSA, and