Android: Add AudioDeviceModule interface and clean up implementation code
This CL introduces sdk/android/api/org/webrtc/audio/AudioDeviceModule.java, which is the new interface for audio device modules on Android. This CL also refactors the main AudioDeviceModule implementation, which is sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java and makes it conform to the new interface. The old code used global static methods to configure the audio device code. This CL gets rid of all that and uses a builder pattern in JavaAudioDeviceModule instead. The only two dynamic methods left in the interface are setSpeakerMute() and setMicrophoneMute(). Removing the global static methods allowed a significant cleanup, and e.g. the file sdk/android/src/jni/audio_device/audio_manager.cc has been completely removed. The PeerConnectionFactory interface is also updated to allow passing in an external AudioDeviceModule. The current built-in ADM is encapsulated under LegacyAudioDeviceModule.java, which is the default for now to ensure backwards compatibility. Bug: webrtc:7452 Change-Id: I64d5f4dba9a004da001f1acb2bd0c1b1f2b64f21 Reviewed-on: https://webrtc-review.googlesource.com/65360 Commit-Queue: Magnus Jedvert <magjed@webrtc.org> Reviewed-by: Magnus Jedvert <magjed@webrtc.org> Reviewed-by: Paulina Hensman <phensman@webrtc.org> Cr-Commit-Position: refs/heads/master@{#22765}
This commit is contained in:
committed by
Commit Bot
parent
3ab5c40f72
commit
66f1e9eb34
@ -14,7 +14,6 @@
|
||||
#include "modules/audio_device/fine_audio_buffer.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
@ -24,9 +23,9 @@ enum AudioDeviceMessageType : uint32_t {
|
||||
kMessageOutputStreamDisconnected,
|
||||
};
|
||||
|
||||
AAudioPlayer::AAudioPlayer(AudioManager* audio_manager)
|
||||
AAudioPlayer::AAudioPlayer(const AudioParameters& audio_parameters)
|
||||
: main_thread_(rtc::Thread::Current()),
|
||||
aaudio_(audio_manager, AAUDIO_DIRECTION_OUTPUT, this) {
|
||||
aaudio_(audio_parameters, AAUDIO_DIRECTION_OUTPUT, this) {
|
||||
RTC_LOG(INFO) << "ctor";
|
||||
thread_checker_aaudio_.DetachFromThread();
|
||||
}
|
||||
|
||||
@ -55,7 +55,7 @@ class AAudioPlayer final : public AudioOutput,
|
||||
public AAudioObserverInterface,
|
||||
public rtc::MessageHandler {
|
||||
public:
|
||||
explicit AAudioPlayer(AudioManager* audio_manager);
|
||||
explicit AAudioPlayer(const AudioParameters& audio_parameters);
|
||||
~AAudioPlayer() override;
|
||||
|
||||
int Init() override;
|
||||
|
||||
@ -15,7 +15,7 @@
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/timeutils.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
|
||||
#include "system_wrappers/include/sleep.h"
|
||||
|
||||
namespace webrtc {
|
||||
@ -26,9 +26,9 @@ enum AudioDeviceMessageType : uint32_t {
|
||||
kMessageInputStreamDisconnected,
|
||||
};
|
||||
|
||||
AAudioRecorder::AAudioRecorder(AudioManager* audio_manager)
|
||||
AAudioRecorder::AAudioRecorder(const AudioParameters& audio_parameters)
|
||||
: main_thread_(rtc::Thread::Current()),
|
||||
aaudio_(audio_manager, AAUDIO_DIRECTION_INPUT, this) {
|
||||
aaudio_(audio_parameters, AAUDIO_DIRECTION_INPUT, this) {
|
||||
RTC_LOG(INFO) << "ctor";
|
||||
thread_checker_aaudio_.DetachFromThread();
|
||||
}
|
||||
@ -122,6 +122,14 @@ void AAudioRecorder::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
|
||||
audio_device_buffer_, audio_parameters.sample_rate(), capacity));
|
||||
}
|
||||
|
||||
bool AAudioRecorder::IsAcousticEchoCancelerSupported() const {
|
||||
return false;
|
||||
}
|
||||
|
||||
bool AAudioRecorder::IsNoiseSuppressorSupported() const {
|
||||
return false;
|
||||
}
|
||||
|
||||
int AAudioRecorder::EnableBuiltInAEC(bool enable) {
|
||||
RTC_LOG(INFO) << "EnableBuiltInAEC: " << enable;
|
||||
RTC_LOG(LS_ERROR) << "Not implemented";
|
||||
|
||||
@ -47,7 +47,7 @@ class AAudioRecorder : public AudioInput,
|
||||
public AAudioObserverInterface,
|
||||
public rtc::MessageHandler {
|
||||
public:
|
||||
explicit AAudioRecorder(AudioManager* audio_manager);
|
||||
explicit AAudioRecorder(const AudioParameters& audio_parameters);
|
||||
~AAudioRecorder() override;
|
||||
|
||||
int Init() override;
|
||||
@ -63,6 +63,8 @@ class AAudioRecorder : public AudioInput,
|
||||
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
|
||||
|
||||
// TODO(henrika): add support using AAudio APIs when available.
|
||||
bool IsAcousticEchoCancelerSupported() const override;
|
||||
bool IsNoiseSuppressorSupported() const override;
|
||||
int EnableBuiltInAEC(bool enable) override;
|
||||
int EnableBuiltInAGC(bool enable) override;
|
||||
int EnableBuiltInNS(bool enable) override;
|
||||
|
||||
@ -13,7 +13,6 @@
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
#include "rtc_base/timeutils.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
|
||||
#define LOG_ON_ERROR(op) \
|
||||
do { \
|
||||
@ -132,15 +131,14 @@ class ScopedStreamBuilder {
|
||||
|
||||
} // namespace
|
||||
|
||||
AAudioWrapper::AAudioWrapper(AudioManager* audio_manager,
|
||||
AAudioWrapper::AAudioWrapper(const AudioParameters& audio_parameters,
|
||||
aaudio_direction_t direction,
|
||||
AAudioObserverInterface* observer)
|
||||
: direction_(direction), observer_(observer) {
|
||||
: audio_parameters_(audio_parameters),
|
||||
direction_(direction),
|
||||
observer_(observer) {
|
||||
RTC_LOG(INFO) << "ctor";
|
||||
RTC_DCHECK(observer_);
|
||||
direction_ == AAUDIO_DIRECTION_OUTPUT
|
||||
? audio_parameters_ = audio_manager->GetPlayoutAudioParameters()
|
||||
: audio_parameters_ = audio_manager->GetRecordAudioParameters();
|
||||
aaudio_thread_checker_.DetachFromThread();
|
||||
RTC_LOG(INFO) << audio_parameters_.ToString();
|
||||
}
|
||||
|
||||
@ -20,8 +20,6 @@ namespace webrtc {
|
||||
|
||||
namespace android_adm {
|
||||
|
||||
class AudioManager;
|
||||
|
||||
// AAudio callback interface for audio transport to/from the AAudio stream.
|
||||
// The interface also contains an error callback method for notifications of
|
||||
// e.g. device changes.
|
||||
@ -60,7 +58,7 @@ class AAudioObserverInterface {
|
||||
// ensure that the audio device and stream direction agree.
|
||||
class AAudioWrapper {
|
||||
public:
|
||||
AAudioWrapper(AudioManager* audio_manager,
|
||||
AAudioWrapper(const AudioParameters& audio_parameters,
|
||||
aaudio_direction_t direction,
|
||||
AAudioObserverInterface* observer);
|
||||
~AAudioWrapper();
|
||||
@ -117,7 +115,7 @@ class AAudioWrapper {
|
||||
|
||||
rtc::ThreadChecker thread_checker_;
|
||||
rtc::ThreadChecker aaudio_thread_checker_;
|
||||
AudioParameters audio_parameters_;
|
||||
const AudioParameters audio_parameters_;
|
||||
const aaudio_direction_t direction_;
|
||||
AAudioObserverInterface* observer_ = nullptr;
|
||||
AAudioStream* stream_ = nullptr;
|
||||
|
||||
@ -16,6 +16,7 @@
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/refcountedobject.h"
|
||||
#include "rtc_base/thread_checker.h"
|
||||
#include "sdk/android/generated_audio_device_base_jni/jni/WebRtcAudioManager_jni.h"
|
||||
#include "system_wrappers/include/metrics.h"
|
||||
|
||||
#define CHECKinitialized_() \
|
||||
@ -38,15 +39,15 @@ namespace android_adm {
|
||||
|
||||
namespace {
|
||||
|
||||
// InputType/OutputType can be any class that implements the capturing/rendering
|
||||
// part of the AudioDeviceGeneric API.
|
||||
// Construction and destruction must be done on one and the same thread. Each
|
||||
// internal implementation of InputType and OutputType will RTC_DCHECK if that
|
||||
// is not the case. All implemented methods must also be called on the same
|
||||
// thread. See comments in each InputType/OutputType class for more info.
|
||||
// It is possible to call the two static methods (SetAndroidAudioDeviceObjects
|
||||
// and ClearAndroidAudioDeviceObjects) from a different thread but both will
|
||||
// RTC_CHECK that the calling thread is attached to a Java VM.
|
||||
// This class combines a generic instance of an AudioInput and a generic
|
||||
// instance of an AudioOutput to create an AudioDeviceModule. This is mostly
|
||||
// done by delegating to the audio input/output with some glue code. This class
|
||||
// also directly implements some of the AudioDeviceModule methods with dummy
|
||||
// implementations.
|
||||
//
|
||||
// An instance can be created on any thread, but must then be used on one and
|
||||
// the same thread. All public methods must also be called on the same thread. A
|
||||
// thread checker will RTC_DCHECK if any method is called on an invalid thread.
|
||||
class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
public:
|
||||
// For use with UMA logging. Must be kept in sync with histograms.xml in
|
||||
@ -61,17 +62,20 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
};
|
||||
|
||||
AndroidAudioDeviceModule(AudioDeviceModule::AudioLayer audio_layer,
|
||||
std::unique_ptr<AudioManager> audio_manager,
|
||||
bool is_stereo_playout_supported,
|
||||
bool is_stereo_record_supported,
|
||||
uint16_t playout_delay_ms,
|
||||
std::unique_ptr<AudioInput> audio_input,
|
||||
std::unique_ptr<AudioOutput> audio_output)
|
||||
: audio_layer_(audio_layer),
|
||||
audio_manager_(std::move(audio_manager)),
|
||||
is_stereo_playout_supported_(is_stereo_playout_supported),
|
||||
is_stereo_record_supported_(is_stereo_record_supported),
|
||||
playout_delay_ms_(playout_delay_ms),
|
||||
input_(std::move(audio_input)),
|
||||
output_(std::move(audio_output)),
|
||||
initialized_(false) {
|
||||
RTC_CHECK(input_);
|
||||
RTC_CHECK(output_);
|
||||
RTC_CHECK(audio_manager_);
|
||||
RTC_LOG(INFO) << __FUNCTION__;
|
||||
thread_checker_.DetachFromThread();
|
||||
}
|
||||
@ -99,14 +103,10 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
return 0;
|
||||
}
|
||||
InitStatus status;
|
||||
if (!audio_manager_->Init()) {
|
||||
status = InitStatus::OTHER_ERROR;
|
||||
} else if (output_->Init() != 0) {
|
||||
audio_manager_->Close();
|
||||
if (output_->Init() != 0) {
|
||||
status = InitStatus::PLAYOUT_ERROR;
|
||||
} else if (input_->Init() != 0) {
|
||||
output_->Terminate();
|
||||
audio_manager_->Close();
|
||||
status = InitStatus::RECORDING_ERROR;
|
||||
} else {
|
||||
initialized_ = true;
|
||||
@ -129,7 +129,6 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
int32_t err = input_->Terminate();
|
||||
err |= output_->Terminate();
|
||||
err |= !audio_manager_->Close();
|
||||
initialized_ = false;
|
||||
RTC_DCHECK_EQ(err, 0);
|
||||
return err;
|
||||
@ -262,7 +261,6 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
int32_t StopPlayout() override {
|
||||
RTC_LOG(INFO) << __FUNCTION__;
|
||||
CHECKinitialized_();
|
||||
// Avoid using audio manager (JNI/Java cost) if playout was inactive.
|
||||
if (!Playing())
|
||||
return 0;
|
||||
RTC_LOG(INFO) << __FUNCTION__;
|
||||
@ -463,12 +461,10 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Returns true if the audio manager has been configured to support stereo
|
||||
// and false otherwise. Default is mono.
|
||||
int32_t StereoPlayoutIsAvailable(bool* available) const override {
|
||||
RTC_LOG(INFO) << __FUNCTION__;
|
||||
CHECKinitialized_();
|
||||
*available = audio_manager_->IsStereoPlayoutSupported();
|
||||
*available = is_stereo_playout_supported_;
|
||||
RTC_LOG(INFO) << "output: " << *available;
|
||||
return 0;
|
||||
}
|
||||
@ -480,11 +476,9 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
RTC_LOG(WARNING) << "recording in stereo is not supported";
|
||||
return -1;
|
||||
}
|
||||
bool available = audio_manager_->IsStereoPlayoutSupported();
|
||||
// Android does not support changes between mono and stereo on the fly.
|
||||
// Instead, the native audio layer is configured via the audio manager
|
||||
// to either support mono or stereo. It is allowed to call this method
|
||||
// if that same state is not modified.
|
||||
bool available = is_stereo_playout_supported_;
|
||||
// Android does not support changes between mono and stereo on the fly. It is
|
||||
// allowed to call this method if that same state is not modified.
|
||||
if (enable != available) {
|
||||
RTC_LOG(WARNING) << "failed to change stereo recording";
|
||||
return -1;
|
||||
@ -500,7 +494,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
int32_t StereoPlayout(bool* enabled) const override {
|
||||
RTC_LOG(INFO) << __FUNCTION__;
|
||||
CHECKinitialized_();
|
||||
*enabled = audio_manager_->IsStereoPlayoutSupported();
|
||||
*enabled = is_stereo_playout_supported_;
|
||||
RTC_LOG(INFO) << "output: " << *enabled;
|
||||
return 0;
|
||||
}
|
||||
@ -508,7 +502,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
int32_t StereoRecordingIsAvailable(bool* available) const override {
|
||||
RTC_LOG(INFO) << __FUNCTION__;
|
||||
CHECKinitialized_();
|
||||
*available = audio_manager_->IsStereoRecordSupported();
|
||||
*available = is_stereo_record_supported_;
|
||||
RTC_LOG(INFO) << "output: " << *available;
|
||||
return 0;
|
||||
}
|
||||
@ -520,11 +514,9 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
RTC_LOG(WARNING) << "recording in stereo is not supported";
|
||||
return -1;
|
||||
}
|
||||
bool available = audio_manager_->IsStereoRecordSupported();
|
||||
// Android does not support changes between mono and stereo on the fly.
|
||||
// Instead, the native audio layer is configured via the audio manager
|
||||
// to either support mono or stereo. It is allowed to call this method
|
||||
// if that same state is not modified.
|
||||
bool available = is_stereo_record_supported_;
|
||||
// Android does not support changes between mono and stereo on the fly. It is
|
||||
// allowed to call this method if that same state is not modified.
|
||||
if (enable != available) {
|
||||
RTC_LOG(WARNING) << "failed to change stereo recording";
|
||||
return -1;
|
||||
@ -540,7 +532,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
int32_t StereoRecording(bool* enabled) const override {
|
||||
RTC_LOG(INFO) << __FUNCTION__;
|
||||
CHECKinitialized_();
|
||||
*enabled = audio_manager_->IsStereoRecordSupported();
|
||||
*enabled = is_stereo_record_supported_;
|
||||
RTC_LOG(INFO) << "output: " << *enabled;
|
||||
return 0;
|
||||
}
|
||||
@ -548,7 +540,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
int32_t PlayoutDelay(uint16_t* delay_ms) const override {
|
||||
CHECKinitialized_();
|
||||
// Best guess we can do is to use half of the estimated total delay.
|
||||
*delay_ms = audio_manager_->GetDelayEstimateInMilliseconds() / 2;
|
||||
*delay_ms = playout_delay_ms_;
|
||||
RTC_DCHECK_GT(*delay_ms, 0);
|
||||
return 0;
|
||||
}
|
||||
@ -568,7 +560,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
bool BuiltInAECIsAvailable() const override {
|
||||
RTC_LOG(INFO) << __FUNCTION__;
|
||||
CHECKinitialized__BOOL();
|
||||
bool isAvailable = audio_manager_->IsAcousticEchoCancelerSupported();
|
||||
bool isAvailable = input_->IsAcousticEchoCancelerSupported();
|
||||
RTC_LOG(INFO) << "output: " << isAvailable;
|
||||
return isAvailable;
|
||||
}
|
||||
@ -592,7 +584,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
bool BuiltInNSIsAvailable() const override {
|
||||
RTC_LOG(INFO) << __FUNCTION__;
|
||||
CHECKinitialized__BOOL();
|
||||
bool isAvailable = audio_manager_->IsNoiseSuppressorSupported();
|
||||
bool isAvailable = input_->IsNoiseSuppressorSupported();
|
||||
RTC_LOG(INFO) << "output: " << isAvailable;
|
||||
return isAvailable;
|
||||
}
|
||||
@ -638,7 +630,9 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
rtc::ThreadChecker thread_checker_;
|
||||
|
||||
const AudioDeviceModule::AudioLayer audio_layer_;
|
||||
const std::unique_ptr<AudioManager> audio_manager_;
|
||||
const bool is_stereo_playout_supported_;
|
||||
const bool is_stereo_record_supported_;
|
||||
const uint16_t playout_delay_ms_;
|
||||
const std::unique_ptr<AudioInput> input_;
|
||||
const std::unique_ptr<AudioOutput> output_;
|
||||
std::unique_ptr<AudioDeviceBuffer> audio_device_buffer_;
|
||||
@ -648,14 +642,47 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
|
||||
|
||||
} // namespace
|
||||
|
||||
ScopedJavaLocalRef<jobject> GetAudioManager(JNIEnv* env,
|
||||
const JavaRef<jobject>& j_context) {
|
||||
return Java_WebRtcAudioManager_getAudioManager(env, j_context);
|
||||
}
|
||||
|
||||
int GetDefaultSampleRate(JNIEnv* env, const JavaRef<jobject>& j_audio_manager) {
|
||||
return Java_WebRtcAudioManager_getSampleRate(env, j_audio_manager);
|
||||
}
|
||||
|
||||
void GetAudioParameters(JNIEnv* env,
|
||||
const JavaRef<jobject>& j_context,
|
||||
const JavaRef<jobject>& j_audio_manager,
|
||||
int sample_rate,
|
||||
bool use_stereo_input,
|
||||
bool use_stereo_output,
|
||||
AudioParameters* input_parameters,
|
||||
AudioParameters* output_parameters) {
|
||||
const size_t output_channels = use_stereo_output ? 2 : 1;
|
||||
const size_t input_channels = use_stereo_input ? 2 : 1;
|
||||
const size_t output_buffer_size = Java_WebRtcAudioManager_getOutputBufferSize(
|
||||
env, j_context, j_audio_manager, sample_rate, output_channels);
|
||||
const size_t input_buffer_size = Java_WebRtcAudioManager_getInputBufferSize(
|
||||
env, j_context, j_audio_manager, sample_rate, input_channels);
|
||||
output_parameters->reset(sample_rate, static_cast<size_t>(output_channels),
|
||||
static_cast<size_t>(output_buffer_size));
|
||||
input_parameters->reset(sample_rate, static_cast<size_t>(input_channels),
|
||||
static_cast<size_t>(input_buffer_size));
|
||||
RTC_CHECK(input_parameters->is_valid());
|
||||
RTC_CHECK(output_parameters->is_valid());
|
||||
}
|
||||
|
||||
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
|
||||
AudioDeviceModule::AudioLayer audio_layer,
|
||||
std::unique_ptr<AudioManager> audio_manager,
|
||||
bool is_stereo_playout_supported,
|
||||
bool is_stereo_record_supported,
|
||||
uint16_t playout_delay_ms,
|
||||
std::unique_ptr<AudioInput> audio_input,
|
||||
std::unique_ptr<AudioOutput> audio_output) {
|
||||
return new rtc::RefCountedObject<AndroidAudioDeviceModule>(
|
||||
audio_layer, std::move(audio_manager), std::move(audio_input),
|
||||
std::move(audio_output));
|
||||
audio_layer, is_stereo_playout_supported, is_stereo_record_supported,
|
||||
playout_delay_ms, std::move(audio_input), std::move(audio_output));
|
||||
}
|
||||
|
||||
} // namespace android_adm
|
||||
|
||||
@ -16,14 +16,11 @@
|
||||
#include "api/optional.h"
|
||||
#include "modules/audio_device/audio_device_buffer.h"
|
||||
#include "sdk/android/native_api/jni/scoped_java_ref.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace android_adm {
|
||||
|
||||
class AudioManager;
|
||||
|
||||
class AudioInput {
|
||||
public:
|
||||
virtual ~AudioInput() {}
|
||||
@ -40,6 +37,11 @@ class AudioInput {
|
||||
|
||||
virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
|
||||
|
||||
// Returns true if the audio input supports built-in audio effects for AEC and
|
||||
// NS.
|
||||
virtual bool IsAcousticEchoCancelerSupported() const = 0;
|
||||
virtual bool IsNoiseSuppressorSupported() const = 0;
|
||||
|
||||
virtual int32_t EnableBuiltInAEC(bool enable) = 0;
|
||||
virtual int32_t EnableBuiltInAGC(bool enable) = 0;
|
||||
virtual int32_t EnableBuiltInNS(bool enable) = 0;
|
||||
@ -64,9 +66,29 @@ class AudioOutput {
|
||||
virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
|
||||
};
|
||||
|
||||
// Extract an android.media.AudioManager from an android.content.Context.
|
||||
ScopedJavaLocalRef<jobject> GetAudioManager(JNIEnv* env,
|
||||
const JavaRef<jobject>& j_context);
|
||||
|
||||
// Get default audio sample rate by querying an android.media.AudioManager.
|
||||
int GetDefaultSampleRate(JNIEnv* env, const JavaRef<jobject>& j_audio_manager);
|
||||
|
||||
// Get audio input and output parameters based on a number of settings.
|
||||
void GetAudioParameters(JNIEnv* env,
|
||||
const JavaRef<jobject>& j_context,
|
||||
const JavaRef<jobject>& j_audio_manager,
|
||||
int sample_rate,
|
||||
bool use_stereo_input,
|
||||
bool use_stereo_output,
|
||||
AudioParameters* input_parameters,
|
||||
AudioParameters* output_parameters);
|
||||
|
||||
// Glue together an audio input and audio output to get an AudioDeviceModule.
|
||||
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
|
||||
AudioDeviceModule::AudioLayer audio_layer,
|
||||
std::unique_ptr<AudioManager> audio_manager,
|
||||
bool is_stereo_playout_supported,
|
||||
bool is_stereo_record_supported,
|
||||
uint16_t playout_delay_ms,
|
||||
std::unique_ptr<AudioInput> audio_input,
|
||||
std::unique_ptr<AudioOutput> audio_output);
|
||||
|
||||
|
||||
@ -1,132 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
|
||||
#include <utility>
|
||||
|
||||
#include "rtc_base/arraysize.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/platform_thread.h"
|
||||
#include "rtc_base/ptr_util.h"
|
||||
#include "rtc_base/refcount.h"
|
||||
#include "rtc_base/refcountedobject.h"
|
||||
|
||||
#include "sdk/android/generated_audio_device_base_jni/jni/WebRtcAudioManager_jni.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_common.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_device_module.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
#include "sdk/android/src/jni/jni_helpers.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace android_adm {
|
||||
|
||||
// AudioManager implementation
|
||||
AudioManager::AudioManager(JNIEnv* env,
|
||||
AudioDeviceModule::AudioLayer audio_layer,
|
||||
const JavaParamRef<jobject>& application_context)
|
||||
: j_audio_manager_(
|
||||
Java_WebRtcAudioManager_Constructor(env, application_context)),
|
||||
audio_layer_(audio_layer),
|
||||
initialized_(false) {
|
||||
RTC_LOG(INFO) << "ctor";
|
||||
const int sample_rate =
|
||||
Java_WebRtcAudioManager_getSampleRate(env, j_audio_manager_);
|
||||
const size_t output_channels =
|
||||
Java_WebRtcAudioManager_getStereoOutput(env, j_audio_manager_) ? 2 : 1;
|
||||
const size_t input_channels =
|
||||
Java_WebRtcAudioManager_getStereoInput(env, j_audio_manager_) ? 2 : 1;
|
||||
const size_t output_buffer_size =
|
||||
Java_WebRtcAudioManager_getOutputBufferSize(env, j_audio_manager_);
|
||||
const size_t input_buffer_size =
|
||||
Java_WebRtcAudioManager_getInputBufferSize(env, j_audio_manager_);
|
||||
playout_parameters_.reset(sample_rate, static_cast<size_t>(output_channels),
|
||||
static_cast<size_t>(output_buffer_size));
|
||||
record_parameters_.reset(sample_rate, static_cast<size_t>(input_channels),
|
||||
static_cast<size_t>(input_buffer_size));
|
||||
RTC_CHECK(playout_parameters_.is_valid());
|
||||
RTC_CHECK(record_parameters_.is_valid());
|
||||
thread_checker_.DetachFromThread();
|
||||
}
|
||||
|
||||
AudioManager::~AudioManager() {
|
||||
RTC_LOG(INFO) << "dtor";
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
Close();
|
||||
}
|
||||
|
||||
bool AudioManager::Init() {
|
||||
RTC_LOG(INFO) << "Init";
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
RTC_DCHECK(!initialized_);
|
||||
RTC_DCHECK_NE(audio_layer_, AudioDeviceModule::kPlatformDefaultAudio);
|
||||
JNIEnv* env = AttachCurrentThreadIfNeeded();
|
||||
if (!Java_WebRtcAudioManager_init(env, j_audio_manager_)) {
|
||||
RTC_LOG(LS_ERROR) << "Init() failed";
|
||||
return false;
|
||||
}
|
||||
initialized_ = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool AudioManager::Close() {
|
||||
RTC_LOG(INFO) << "Close";
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
if (!initialized_)
|
||||
return true;
|
||||
JNIEnv* env = AttachCurrentThreadIfNeeded();
|
||||
Java_WebRtcAudioManager_dispose(env, j_audio_manager_);
|
||||
initialized_ = false;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool AudioManager::IsAcousticEchoCancelerSupported() const {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
JNIEnv* env = AttachCurrentThreadIfNeeded();
|
||||
return Java_WebRtcAudioManager_isAcousticEchoCancelerSupported(
|
||||
env, j_audio_manager_);
|
||||
}
|
||||
|
||||
bool AudioManager::IsNoiseSuppressorSupported() const {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
JNIEnv* env = AttachCurrentThreadIfNeeded();
|
||||
return Java_WebRtcAudioManager_isNoiseSuppressorSupported(env,
|
||||
j_audio_manager_);
|
||||
}
|
||||
|
||||
bool AudioManager::IsStereoPlayoutSupported() const {
|
||||
return (playout_parameters_.channels() == 2);
|
||||
}
|
||||
|
||||
bool AudioManager::IsStereoRecordSupported() const {
|
||||
return (record_parameters_.channels() == 2);
|
||||
}
|
||||
|
||||
int AudioManager::GetDelayEstimateInMilliseconds() const {
|
||||
return audio_layer_ == AudioDeviceModule::kAndroidJavaAudio
|
||||
? kHighLatencyModeDelayEstimateInMilliseconds
|
||||
: kLowLatencyModeDelayEstimateInMilliseconds;
|
||||
}
|
||||
|
||||
const AudioParameters& AudioManager::GetPlayoutAudioParameters() {
|
||||
RTC_CHECK(playout_parameters_.is_valid());
|
||||
return playout_parameters_;
|
||||
}
|
||||
|
||||
const AudioParameters& AudioManager::GetRecordAudioParameters() {
|
||||
RTC_CHECK(record_parameters_.is_valid());
|
||||
return record_parameters_;
|
||||
}
|
||||
|
||||
} // namespace android_adm
|
||||
|
||||
} // namespace webrtc
|
||||
@ -1,95 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_MANAGER_H_
|
||||
#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_MANAGER_H_
|
||||
|
||||
#include <jni.h>
|
||||
#include <memory>
|
||||
|
||||
#include "modules/audio_device/include/audio_device.h"
|
||||
#include "modules/audio_device/include/audio_device_defines.h"
|
||||
#include "rtc_base/thread_checker.h"
|
||||
#include "sdk/android/native_api/jni/scoped_java_ref.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_common.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace android_adm {
|
||||
|
||||
// Implements support for functions in the WebRTC audio stack for Android that
|
||||
// relies on the AudioManager in android.media. It also populates an
|
||||
// AudioParameter structure with native audio parameters detected at
|
||||
// construction. This class does not make any audio-related modifications
|
||||
// unless Init() is called.
|
||||
class AudioManager {
|
||||
public:
|
||||
AudioManager(JNIEnv* env,
|
||||
AudioDeviceModule::AudioLayer audio_layer,
|
||||
const JavaParamRef<jobject>& application_context);
|
||||
~AudioManager();
|
||||
|
||||
// Initializes the audio manager and stores the current audio mode.
|
||||
bool Init();
|
||||
// Revert any setting done by Init().
|
||||
bool Close();
|
||||
|
||||
// Native audio parameters stored during construction.
|
||||
const AudioParameters& GetPlayoutAudioParameters();
|
||||
const AudioParameters& GetRecordAudioParameters();
|
||||
|
||||
// Returns true if the device supports built-in audio effects for AEC, AGC
|
||||
// and NS. Some devices can also be blacklisted for use in combination with
|
||||
// platform effects and these devices will return false.
|
||||
// Can currently only be used in combination with a Java based audio backend
|
||||
// for the recording side (i.e. using the android.media.AudioRecord API).
|
||||
bool IsAcousticEchoCancelerSupported() const;
|
||||
bool IsNoiseSuppressorSupported() const;
|
||||
|
||||
// Returns true if the device supports (and has been configured for) stereo.
|
||||
// Call the Java API WebRtcAudioManager.setStereoOutput/Input() with true as
|
||||
// parameter to enable stereo. Default is mono in both directions and the
|
||||
// setting is set once and for all when the audio manager object is created.
|
||||
// TODO(henrika): stereo is not supported in combination with OpenSL ES.
|
||||
bool IsStereoPlayoutSupported() const;
|
||||
bool IsStereoRecordSupported() const;
|
||||
|
||||
// Returns the estimated total delay of this device. Unit is in milliseconds.
|
||||
// The value is set once at construction and never changes after that.
|
||||
// Possible values are webrtc::kLowLatencyModeDelayEstimateInMilliseconds and
|
||||
// webrtc::kHighLatencyModeDelayEstimateInMilliseconds.
|
||||
int GetDelayEstimateInMilliseconds() const;
|
||||
|
||||
private:
|
||||
// This class is single threaded except that construction might happen on a
|
||||
// different thread.
|
||||
rtc::ThreadChecker thread_checker_;
|
||||
|
||||
// Wraps the Java specific parts of the AudioManager.
|
||||
ScopedJavaGlobalRef<jobject> j_audio_manager_;
|
||||
|
||||
// Contains the selected audio layer specified by the AudioLayer enumerator
|
||||
// in the AudioDeviceModule class.
|
||||
const AudioDeviceModule::AudioLayer audio_layer_;
|
||||
|
||||
// Set to true by Init() and false by Close().
|
||||
bool initialized_;
|
||||
|
||||
// Contains native parameters (e.g. sample rate, channel configuration). Set
|
||||
// at construction.
|
||||
AudioParameters playout_parameters_;
|
||||
AudioParameters record_parameters_;
|
||||
};
|
||||
|
||||
} // namespace android_adm
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_MANAGER_H_
|
||||
@ -48,14 +48,20 @@ class ScopedHistogramTimer {
|
||||
|
||||
} // namespace
|
||||
|
||||
// AudioRecordJni implementation.
|
||||
AudioRecordJni::AudioRecordJni(AudioManager* audio_manager)
|
||||
: j_audio_record_(
|
||||
Java_WebRtcAudioRecord_Constructor(AttachCurrentThreadIfNeeded(),
|
||||
jni::jlongFromPointer(this))),
|
||||
audio_manager_(audio_manager),
|
||||
audio_parameters_(audio_manager->GetRecordAudioParameters()),
|
||||
total_delay_in_milliseconds_(0),
|
||||
ScopedJavaLocalRef<jobject> AudioRecordJni::CreateJavaWebRtcAudioRecord(
|
||||
JNIEnv* env,
|
||||
const JavaRef<jobject>& j_context,
|
||||
const JavaRef<jobject>& j_audio_manager) {
|
||||
return Java_WebRtcAudioRecord_Constructor(env, j_context, j_audio_manager);
|
||||
}
|
||||
|
||||
AudioRecordJni::AudioRecordJni(JNIEnv* env,
|
||||
const AudioParameters& audio_parameters,
|
||||
int total_delay_ms,
|
||||
const JavaRef<jobject>& j_audio_record)
|
||||
: j_audio_record_(env, j_audio_record),
|
||||
audio_parameters_(audio_parameters),
|
||||
total_delay_ms_(total_delay_ms),
|
||||
direct_buffer_address_(nullptr),
|
||||
direct_buffer_capacity_in_bytes_(0),
|
||||
frames_per_buffer_(0),
|
||||
@ -64,6 +70,8 @@ AudioRecordJni::AudioRecordJni(AudioManager* audio_manager)
|
||||
audio_device_buffer_(nullptr) {
|
||||
RTC_LOG(INFO) << "ctor";
|
||||
RTC_DCHECK(audio_parameters_.is_valid());
|
||||
Java_WebRtcAudioRecord_setNativeAudioRecord(env, j_audio_record_,
|
||||
jni::jlongFromPointer(this));
|
||||
// Detach from this thread since construction is allowed to happen on a
|
||||
// different thread.
|
||||
thread_checker_.DetachFromThread();
|
||||
@ -171,11 +179,16 @@ void AudioRecordJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
|
||||
const size_t channels = audio_parameters_.channels();
|
||||
RTC_LOG(INFO) << "SetRecordingChannels(" << channels << ")";
|
||||
audio_device_buffer_->SetRecordingChannels(channels);
|
||||
total_delay_in_milliseconds_ =
|
||||
audio_manager_->GetDelayEstimateInMilliseconds();
|
||||
RTC_DCHECK_GT(total_delay_in_milliseconds_, 0);
|
||||
RTC_LOG(INFO) << "total_delay_in_milliseconds: "
|
||||
<< total_delay_in_milliseconds_;
|
||||
}
|
||||
|
||||
bool AudioRecordJni::IsAcousticEchoCancelerSupported() const {
|
||||
return Java_WebRtcAudioRecord_isAcousticEchoCancelerSupported(
|
||||
env_, j_audio_record_);
|
||||
}
|
||||
|
||||
bool AudioRecordJni::IsNoiseSuppressorSupported() const {
|
||||
return Java_WebRtcAudioRecord_isNoiseSuppressorSupported(env_,
|
||||
j_audio_record_);
|
||||
}
|
||||
|
||||
int32_t AudioRecordJni::EnableBuiltInAEC(bool enable) {
|
||||
@ -228,7 +241,7 @@ void AudioRecordJni::DataIsRecorded(JNIEnv* env,
|
||||
// We provide one (combined) fixed delay estimate for the APM and use the
|
||||
// |playDelayMs| parameter only. Components like the AEC only sees the sum
|
||||
// of |playDelayMs| and |recDelayMs|, hence the distributions does not matter.
|
||||
audio_device_buffer_->SetVQEData(total_delay_in_milliseconds_, 0);
|
||||
audio_device_buffer_->SetVQEData(total_delay_ms_, 0);
|
||||
if (audio_device_buffer_->DeliverRecordedData() == -1) {
|
||||
RTC_LOG(INFO) << "AudioDeviceBuffer::DeliverRecordedData failed";
|
||||
}
|
||||
|
||||
@ -18,7 +18,6 @@
|
||||
#include "modules/audio_device/include/audio_device_defines.h"
|
||||
#include "rtc_base/thread_checker.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_device_module.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
@ -35,16 +34,24 @@ namespace android_adm {
|
||||
// SDKs, the AEC provided by the APM in WebRTC must be used and enabled
|
||||
// separately instead.
|
||||
//
|
||||
// An instance must be created and destroyed on one and the same thread.
|
||||
// All public methods must also be called on the same thread. A thread checker
|
||||
// will RTC_DCHECK if any method is called on an invalid thread.
|
||||
// An instance can be created on any thread, but must then be used on one and
|
||||
// the same thread. All public methods must also be called on the same thread. A
|
||||
// thread checker will RTC_DCHECK if any method is called on an invalid thread.
|
||||
//
|
||||
// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed
|
||||
// and detach when the object goes out of scope. Additional thread checking
|
||||
// guarantees that no other (possibly non attached) thread is used.
|
||||
// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed.
|
||||
// Additional thread checking guarantees that no other (possibly non attached)
|
||||
// thread is used.
|
||||
class AudioRecordJni : public AudioInput {
|
||||
public:
|
||||
explicit AudioRecordJni(AudioManager* audio_manager);
|
||||
static ScopedJavaLocalRef<jobject> CreateJavaWebRtcAudioRecord(
|
||||
JNIEnv* env,
|
||||
const JavaRef<jobject>& j_context,
|
||||
const JavaRef<jobject>& j_audio_manager);
|
||||
|
||||
AudioRecordJni(JNIEnv* env,
|
||||
const AudioParameters& audio_parameters,
|
||||
int total_delay_ms,
|
||||
const JavaRef<jobject>& j_webrtc_audio_record);
|
||||
~AudioRecordJni() override;
|
||||
|
||||
int32_t Init() override;
|
||||
@ -59,6 +66,9 @@ class AudioRecordJni : public AudioInput {
|
||||
|
||||
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
|
||||
|
||||
bool IsAcousticEchoCancelerSupported() const override;
|
||||
bool IsNoiseSuppressorSupported() const override;
|
||||
|
||||
int32_t EnableBuiltInAEC(bool enable) override;
|
||||
int32_t EnableBuiltInAGC(bool enable) override;
|
||||
int32_t EnableBuiltInNS(bool enable) override;
|
||||
@ -94,17 +104,12 @@ class AudioRecordJni : public AudioInput {
|
||||
JNIEnv* env_ = nullptr;
|
||||
ScopedJavaGlobalRef<jobject> j_audio_record_;
|
||||
|
||||
// Raw pointer to the audio manger.
|
||||
const AudioManager* audio_manager_;
|
||||
|
||||
// Contains audio parameters provided to this class at construction by the
|
||||
// AudioManager.
|
||||
const AudioParameters audio_parameters_;
|
||||
|
||||
// Delay estimate of the total round-trip delay (input + output).
|
||||
// Fixed value set once in AttachAudioBuffer() and it can take one out of two
|
||||
// possible values. See audio_common.h for details.
|
||||
int total_delay_in_milliseconds_;
|
||||
const int total_delay_ms_;
|
||||
|
||||
// Cached copy of address to direct audio buffer owned by |j_audio_record_|.
|
||||
void* direct_buffer_address_;
|
||||
|
||||
@ -18,19 +18,24 @@
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/platform_thread.h"
|
||||
#include "sdk/android/generated_java_audio_device_jni/jni/WebRtcAudioTrack_jni.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
#include "sdk/android/src/jni/jni_helpers.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace android_adm {
|
||||
|
||||
// TODO(henrika): possible extend usage of AudioManager and add it as member.
|
||||
AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
|
||||
: j_audio_track_(
|
||||
Java_WebRtcAudioTrack_Constructor(AttachCurrentThreadIfNeeded(),
|
||||
jni::jlongFromPointer(this))),
|
||||
audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
|
||||
ScopedJavaLocalRef<jobject> AudioTrackJni::CreateJavaWebRtcAudioTrack(
|
||||
JNIEnv* env,
|
||||
const JavaRef<jobject>& j_context,
|
||||
const JavaRef<jobject>& j_audio_manager) {
|
||||
return Java_WebRtcAudioTrack_Constructor(env, j_context, j_audio_manager);
|
||||
}
|
||||
|
||||
AudioTrackJni::AudioTrackJni(JNIEnv* env,
|
||||
const AudioParameters& audio_parameters,
|
||||
const JavaRef<jobject>& j_webrtc_audio_track)
|
||||
: j_audio_track_(env, j_webrtc_audio_track),
|
||||
audio_parameters_(audio_parameters),
|
||||
direct_buffer_address_(nullptr),
|
||||
direct_buffer_capacity_in_bytes_(0),
|
||||
frames_per_buffer_(0),
|
||||
@ -39,6 +44,8 @@ AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
|
||||
audio_device_buffer_(nullptr) {
|
||||
RTC_LOG(INFO) << "ctor";
|
||||
RTC_DCHECK(audio_parameters_.is_valid());
|
||||
Java_WebRtcAudioTrack_setNativeAudioTrack(env, j_audio_track_,
|
||||
jni::jlongFromPointer(this));
|
||||
// Detach from this thread since construction is allowed to happen on a
|
||||
// different thread.
|
||||
thread_checker_.DetachFromThread();
|
||||
|
||||
@ -20,7 +20,6 @@
|
||||
#include "rtc_base/thread_checker.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_common.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_device_module.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
@ -32,16 +31,23 @@ namespace android_adm {
|
||||
// C++-land, but decoded audio buffers are requested on a high-priority
|
||||
// thread managed by the Java class.
|
||||
//
|
||||
// An instance must be created and destroyed on one and the same thread.
|
||||
// All public methods must also be called on the same thread. A thread checker
|
||||
// will RTC_DCHECK if any method is called on an invalid thread.
|
||||
// An instance can be created on any thread, but must then be used on one and
|
||||
// the same thread. All public methods must also be called on the same thread. A
|
||||
// thread checker will RTC_DCHECK if any method is called on an invalid thread
|
||||
//
|
||||
// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed
|
||||
// and detach when the object goes out of scope. Additional thread checking
|
||||
// guarantees that no other (possibly non attached) thread is used.
|
||||
// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed.
|
||||
// Additional thread checking guarantees that no other (possibly non attached)
|
||||
// thread is used.
|
||||
class AudioTrackJni : public AudioOutput {
|
||||
public:
|
||||
explicit AudioTrackJni(AudioManager* audio_manager);
|
||||
static ScopedJavaLocalRef<jobject> CreateJavaWebRtcAudioTrack(
|
||||
JNIEnv* env,
|
||||
const JavaRef<jobject>& j_context,
|
||||
const JavaRef<jobject>& j_audio_manager);
|
||||
|
||||
AudioTrackJni(JNIEnv* env,
|
||||
const AudioParameters& audio_parameters,
|
||||
const JavaRef<jobject>& j_webrtc_audio_track);
|
||||
~AudioTrackJni() override;
|
||||
|
||||
int32_t Init() override;
|
||||
|
||||
@ -1,59 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "sdk/android/src/jni/audio_device/build_info.h"
|
||||
|
||||
#include "sdk/android/generated_audio_device_base_jni/jni/BuildInfo_jni.h"
|
||||
#include "sdk/android/src/jni/jni_helpers.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace android_adm {
|
||||
|
||||
BuildInfo::BuildInfo() : env_(AttachCurrentThreadIfNeeded()) {}
|
||||
|
||||
std::string BuildInfo::GetDeviceModel() {
|
||||
thread_checker_.CalledOnValidThread();
|
||||
return JavaToStdString(env_, Java_BuildInfo_getDeviceModel(env_));
|
||||
}
|
||||
|
||||
std::string BuildInfo::GetBrand() {
|
||||
thread_checker_.CalledOnValidThread();
|
||||
return JavaToStdString(env_, Java_BuildInfo_getBrand(env_));
|
||||
}
|
||||
|
||||
std::string BuildInfo::GetDeviceManufacturer() {
|
||||
thread_checker_.CalledOnValidThread();
|
||||
return JavaToStdString(env_, Java_BuildInfo_getDeviceManufacturer(env_));
|
||||
}
|
||||
|
||||
std::string BuildInfo::GetAndroidBuildId() {
|
||||
thread_checker_.CalledOnValidThread();
|
||||
return JavaToStdString(env_, Java_BuildInfo_getAndroidBuildId(env_));
|
||||
}
|
||||
|
||||
std::string BuildInfo::GetBuildType() {
|
||||
thread_checker_.CalledOnValidThread();
|
||||
return JavaToStdString(env_, Java_BuildInfo_getBuildType(env_));
|
||||
}
|
||||
|
||||
std::string BuildInfo::GetBuildRelease() {
|
||||
thread_checker_.CalledOnValidThread();
|
||||
return JavaToStdString(env_, Java_BuildInfo_getBuildRelease(env_));
|
||||
}
|
||||
|
||||
SdkCode BuildInfo::GetSdkVersion() {
|
||||
thread_checker_.CalledOnValidThread();
|
||||
return static_cast<SdkCode>(Java_BuildInfo_getSdkVersion(env_));
|
||||
}
|
||||
|
||||
} // namespace android_adm
|
||||
|
||||
} // namespace webrtc
|
||||
@ -1,76 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_BUILD_INFO_H_
|
||||
#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_BUILD_INFO_H_
|
||||
|
||||
#include <jni.h>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
|
||||
#include "rtc_base/thread_checker.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace android_adm {
|
||||
|
||||
// This enumeration maps to the values returned by BuildInfo::GetSdkVersion(),
|
||||
// indicating the Android release associated with a given SDK version.
|
||||
// See https://developer.android.com/guide/topics/manifest/uses-sdk-element.html
|
||||
// for details.
|
||||
enum SdkCode {
|
||||
SDK_CODE_JELLY_BEAN = 16, // Android 4.1
|
||||
SDK_CODE_JELLY_BEAN_MR1 = 17, // Android 4.2
|
||||
SDK_CODE_JELLY_BEAN_MR2 = 18, // Android 4.3
|
||||
SDK_CODE_KITKAT = 19, // Android 4.4
|
||||
SDK_CODE_WATCH = 20, // Android 4.4W
|
||||
SDK_CODE_LOLLIPOP = 21, // Android 5.0
|
||||
SDK_CODE_LOLLIPOP_MR1 = 22, // Android 5.1
|
||||
SDK_CODE_MARSHMALLOW = 23, // Android 6.0
|
||||
SDK_CODE_N = 24,
|
||||
};
|
||||
|
||||
// Utility class used to query the Java class (org/webrtc/audio/BuildInfo)
|
||||
// for device and Android build information.
|
||||
// The calling thread is attached to the JVM at construction if needed and a
|
||||
// valid Java environment object is also created.
|
||||
// All Get methods must be called on the creating thread. If not, the code will
|
||||
// hit RTC_DCHECKs when calling JNIEnvironment::JavaToStdString().
|
||||
class BuildInfo {
|
||||
public:
|
||||
BuildInfo();
|
||||
~BuildInfo() {}
|
||||
|
||||
// End-user-visible name for the end product (e.g. "Nexus 6").
|
||||
std::string GetDeviceModel();
|
||||
// Consumer-visible brand (e.g. "google").
|
||||
std::string GetBrand();
|
||||
// Manufacturer of the product/hardware (e.g. "motorola").
|
||||
std::string GetDeviceManufacturer();
|
||||
// Android build ID (e.g. LMY47D).
|
||||
std::string GetAndroidBuildId();
|
||||
// The type of build (e.g. "user" or "eng").
|
||||
std::string GetBuildType();
|
||||
// The user-visible version string (e.g. "5.1").
|
||||
std::string GetBuildRelease();
|
||||
// The user-visible SDK version of the framework (e.g. 21). See SdkCode enum
|
||||
// for translation.
|
||||
SdkCode GetSdkVersion();
|
||||
|
||||
private:
|
||||
JNIEnv* const env_;
|
||||
rtc::ThreadChecker thread_checker_;
|
||||
};
|
||||
|
||||
} // namespace android_adm
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_BUILD_INFO_H_
|
||||
50
sdk/android/src/jni/audio_device/java_audio_device_module.cc
Normal file
50
sdk/android/src/jni/audio_device/java_audio_device_module.cc
Normal file
@ -0,0 +1,50 @@
|
||||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "sdk/android/generated_java_audio_device_jni/jni/JavaAudioDeviceModule_jni.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
|
||||
#include "sdk/android/src/jni/jni_helpers.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace jni {
|
||||
|
||||
static jlong JNI_JavaAudioDeviceModule_CreateAudioDeviceModule(
|
||||
JNIEnv* env,
|
||||
const JavaParamRef<jclass>& j_caller,
|
||||
const JavaParamRef<jobject>& j_context,
|
||||
const JavaParamRef<jobject>& j_audio_manager,
|
||||
const JavaParamRef<jobject>& j_webrtc_audio_record,
|
||||
const JavaParamRef<jobject>& j_webrtc_audio_track,
|
||||
int sample_rate,
|
||||
jboolean j_use_stereo_input,
|
||||
jboolean j_use_stereo_output) {
|
||||
AudioParameters input_parameters;
|
||||
AudioParameters output_parameters;
|
||||
android_adm::GetAudioParameters(env, j_context, j_audio_manager, sample_rate,
|
||||
j_use_stereo_input, j_use_stereo_output,
|
||||
&input_parameters, &output_parameters);
|
||||
auto audio_input = rtc::MakeUnique<android_adm::AudioRecordJni>(
|
||||
env, input_parameters,
|
||||
android_adm::kHighLatencyModeDelayEstimateInMilliseconds,
|
||||
j_webrtc_audio_record);
|
||||
auto audio_output = rtc::MakeUnique<android_adm::AudioTrackJni>(
|
||||
env, output_parameters, j_webrtc_audio_track);
|
||||
return jlongFromPointer(
|
||||
CreateAudioDeviceModuleFromInputAndOutput(
|
||||
AudioDeviceModule::kAndroidJavaAudio, j_use_stereo_input,
|
||||
j_use_stereo_output,
|
||||
android_adm::kHighLatencyModeDelayEstimateInMilliseconds,
|
||||
std::move(audio_input), std::move(audio_output))
|
||||
.release());
|
||||
}
|
||||
|
||||
} // namespace jni
|
||||
} // namespace webrtc
|
||||
@ -20,7 +20,6 @@
|
||||
#include "rtc_base/platform_thread.h"
|
||||
#include "rtc_base/timeutils.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_common.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
|
||||
#define TAG "OpenSLESPlayer"
|
||||
#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
|
||||
@ -43,9 +42,9 @@ namespace webrtc {
|
||||
namespace android_adm {
|
||||
|
||||
OpenSLESPlayer::OpenSLESPlayer(
|
||||
AudioManager* audio_manager,
|
||||
const AudioParameters& audio_parameters,
|
||||
std::unique_ptr<OpenSLEngineManager> engine_manager)
|
||||
: audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
|
||||
: audio_parameters_(audio_parameters),
|
||||
audio_device_buffer_(nullptr),
|
||||
initialized_(false),
|
||||
playing_(false),
|
||||
|
||||
@ -23,7 +23,6 @@
|
||||
#include "rtc_base/thread_checker.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_common.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_device_module.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
#include "sdk/android/src/jni/audio_device/opensles_common.h"
|
||||
|
||||
namespace webrtc {
|
||||
@ -35,11 +34,11 @@ namespace android_adm {
|
||||
// Implements 16-bit mono PCM audio output support for Android using the
|
||||
// C based OpenSL ES API. No calls from C/C++ to Java using JNI is done.
|
||||
//
|
||||
// An instance must be created and destroyed on one and the same thread.
|
||||
// All public methods must also be called on the same thread. A thread checker
|
||||
// will RTC_DCHECK if any method is called on an invalid thread. Decoded audio
|
||||
// buffers are requested on a dedicated internal thread managed by the OpenSL
|
||||
// ES layer.
|
||||
// An instance can be created on any thread, but must then be used on one and
|
||||
// the same thread. All public methods must also be called on the same thread. A
|
||||
// thread checker will RTC_DCHECK if any method is called on an invalid thread.
|
||||
// Decoded audio buffers are requested on a dedicated internal thread managed by
|
||||
// the OpenSL ES layer.
|
||||
//
|
||||
// The existing design forces the user to call InitPlayout() after Stoplayout()
|
||||
// to be able to call StartPlayout() again. This is inline with how the Java-
|
||||
@ -60,7 +59,7 @@ class OpenSLESPlayer : public AudioOutput {
|
||||
// TODO(henrika): perhaps set this value dynamically based on OS version.
|
||||
static const int kNumOfOpenSLESBuffers = 2;
|
||||
|
||||
OpenSLESPlayer(AudioManager* audio_manager,
|
||||
OpenSLESPlayer(const AudioParameters& audio_parameters,
|
||||
std::unique_ptr<OpenSLEngineManager> engine_manager);
|
||||
~OpenSLESPlayer() override;
|
||||
|
||||
@ -126,8 +125,6 @@ class OpenSLESPlayer : public AudioOutput {
|
||||
// Detached during construction of this object.
|
||||
rtc::ThreadChecker thread_checker_opensles_;
|
||||
|
||||
// Contains audio parameters provided to this class at construction by the
|
||||
// AudioManager.
|
||||
const AudioParameters audio_parameters_;
|
||||
|
||||
// Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
|
||||
|
||||
@ -20,7 +20,6 @@
|
||||
#include "rtc_base/platform_thread.h"
|
||||
#include "rtc_base/timeutils.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_common.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
|
||||
#define TAG "OpenSLESRecorder"
|
||||
#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
|
||||
@ -43,9 +42,9 @@ namespace webrtc {
|
||||
|
||||
namespace android_adm {
|
||||
|
||||
OpenSLESRecorder::OpenSLESRecorder(AudioManager* audio_manager,
|
||||
OpenSLESRecorder::OpenSLESRecorder(const AudioParameters& audio_parameters,
|
||||
OpenSLEngineManager* engine_manager)
|
||||
: audio_parameters_(audio_manager->GetRecordAudioParameters()),
|
||||
: audio_parameters_(audio_parameters),
|
||||
audio_device_buffer_(nullptr),
|
||||
initialized_(false),
|
||||
recording_(false),
|
||||
@ -195,6 +194,14 @@ void OpenSLESRecorder::AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) {
|
||||
AllocateDataBuffers();
|
||||
}
|
||||
|
||||
bool OpenSLESRecorder::IsAcousticEchoCancelerSupported() const {
|
||||
return false;
|
||||
}
|
||||
|
||||
bool OpenSLESRecorder::IsNoiseSuppressorSupported() const {
|
||||
return false;
|
||||
}
|
||||
|
||||
int OpenSLESRecorder::EnableBuiltInAEC(bool enable) {
|
||||
ALOGD("EnableBuiltInAEC(%d)", enable);
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
|
||||
@ -23,7 +23,6 @@
|
||||
#include "rtc_base/thread_checker.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_common.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_device_module.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
#include "sdk/android/src/jni/audio_device/opensles_common.h"
|
||||
|
||||
namespace webrtc {
|
||||
@ -35,11 +34,11 @@ namespace android_adm {
|
||||
// Implements 16-bit mono PCM audio input support for Android using the
|
||||
// C based OpenSL ES API. No calls from C/C++ to Java using JNI is done.
|
||||
//
|
||||
// An instance must be created and destroyed on one and the same thread.
|
||||
// All public methods must also be called on the same thread. A thread checker
|
||||
// will RTC_DCHECK if any method is called on an invalid thread. Recorded audio
|
||||
// buffers are provided on a dedicated internal thread managed by the OpenSL
|
||||
// ES layer.
|
||||
// An instance can be created on any thread, but must then be used on one and
|
||||
// the same thread. All public methods must also be called on the same thread. A
|
||||
// thread checker will RTC_DCHECK if any method is called on an invalid thread.
|
||||
// Recorded audio buffers are provided on a dedicated internal thread managed by
|
||||
// the OpenSL ES layer.
|
||||
//
|
||||
// The existing design forces the user to call InitRecording() after
|
||||
// StopRecording() to be able to call StartRecording() again. This is inline
|
||||
@ -63,7 +62,7 @@ class OpenSLESRecorder : public AudioInput {
|
||||
// TODO(henrika): perhaps set this value dynamically based on OS version.
|
||||
static const int kNumOfOpenSLESBuffers = 2;
|
||||
|
||||
OpenSLESRecorder(AudioManager* audio_manager,
|
||||
OpenSLESRecorder(const AudioParameters& audio_parameters,
|
||||
OpenSLEngineManager* engine_manager);
|
||||
~OpenSLESRecorder() override;
|
||||
|
||||
@ -80,6 +79,8 @@ class OpenSLESRecorder : public AudioInput {
|
||||
void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) override;
|
||||
|
||||
// TODO(henrika): add support using OpenSL ES APIs when available.
|
||||
bool IsAcousticEchoCancelerSupported() const override;
|
||||
bool IsNoiseSuppressorSupported() const override;
|
||||
int EnableBuiltInAEC(bool enable) override;
|
||||
int EnableBuiltInAGC(bool enable) override;
|
||||
int EnableBuiltInNS(bool enable) override;
|
||||
@ -134,8 +135,6 @@ class OpenSLESRecorder : public AudioInput {
|
||||
// Detached during construction of this object.
|
||||
rtc::ThreadChecker thread_checker_opensles_;
|
||||
|
||||
// Contains audio parameters provided to this class at construction by the
|
||||
// AudioManager.
|
||||
const AudioParameters audio_parameters_;
|
||||
|
||||
// Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
|
||||
|
||||
@ -26,9 +26,6 @@
|
||||
#include "rtc_base/thread.h"
|
||||
#include "sdk/android/generated_peerconnection_jni/jni/PeerConnectionFactory_jni.h"
|
||||
#include "sdk/android/native_api/jni/java_types.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_manager.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
|
||||
#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
|
||||
#include "sdk/android/src/jni/jni_helpers.h"
|
||||
#include "sdk/android/src/jni/pc/androidnetworkmonitor.h"
|
||||
#include "sdk/android/src/jni/pc/audio.h"
|
||||
@ -84,9 +81,6 @@ static char* field_trials_init_string = nullptr;
|
||||
static bool factory_static_initialized = false;
|
||||
static bool video_hw_acceleration_enabled = true;
|
||||
|
||||
static const char* kExternalAndroidAudioDeviceFieldTrialName =
|
||||
"WebRTC-ExternalAndroidAudioDevice";
|
||||
|
||||
void PeerConnectionFactoryNetworkThreadReady() {
|
||||
RTC_LOG(LS_INFO) << "Network thread JavaCallback";
|
||||
JNIEnv* env = AttachCurrentThreadIfNeeded();
|
||||
@ -199,6 +193,7 @@ jlong CreatePeerConnectionFactoryForJava(
|
||||
JNIEnv* jni,
|
||||
const JavaParamRef<jobject>& jcontext,
|
||||
const JavaParamRef<jobject>& joptions,
|
||||
rtc::scoped_refptr<AudioDeviceModule> audio_device_module,
|
||||
const JavaParamRef<jobject>& jencoder_factory,
|
||||
const JavaParamRef<jobject>& jdecoder_factory,
|
||||
rtc::scoped_refptr<AudioProcessing> audio_processor,
|
||||
@ -240,23 +235,6 @@ jlong CreatePeerConnectionFactoryForJava(
|
||||
rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory);
|
||||
}
|
||||
|
||||
rtc::scoped_refptr<AudioDeviceModule> adm = nullptr;
|
||||
if (field_trial::IsEnabled(kExternalAndroidAudioDeviceFieldTrialName)) {
|
||||
// Only Java AudioDeviceModule is supported as an external ADM at the
|
||||
// moment.
|
||||
const AudioDeviceModule::AudioLayer audio_layer =
|
||||
AudioDeviceModule::kAndroidJavaAudio;
|
||||
auto audio_manager =
|
||||
rtc::MakeUnique<android_adm::AudioManager>(jni, audio_layer, jcontext);
|
||||
auto audio_input =
|
||||
rtc::MakeUnique<android_adm::AudioRecordJni>(audio_manager.get());
|
||||
auto audio_output =
|
||||
rtc::MakeUnique<android_adm::AudioTrackJni>(audio_manager.get());
|
||||
adm = CreateAudioDeviceModuleFromInputAndOutput(
|
||||
audio_layer, std::move(audio_manager), std::move(audio_input),
|
||||
std::move(audio_output));
|
||||
}
|
||||
|
||||
rtc::scoped_refptr<AudioMixer> audio_mixer = nullptr;
|
||||
std::unique_ptr<CallFactoryInterface> call_factory(CreateCallFactory());
|
||||
std::unique_ptr<RtcEventLogFactoryInterface> rtc_event_log_factory(
|
||||
@ -274,7 +252,7 @@ jlong CreatePeerConnectionFactoryForJava(
|
||||
legacy_video_decoder_factory = CreateLegacyVideoDecoderFactory();
|
||||
}
|
||||
media_engine.reset(CreateMediaEngine(
|
||||
adm, audio_encoder_factory, audio_decoder_factory,
|
||||
audio_device_module, audio_encoder_factory, audio_decoder_factory,
|
||||
legacy_video_encoder_factory, legacy_video_decoder_factory, audio_mixer,
|
||||
audio_processor));
|
||||
#endif
|
||||
@ -305,7 +283,7 @@ jlong CreatePeerConnectionFactoryForJava(
|
||||
}
|
||||
|
||||
media_engine.reset(CreateMediaEngine(
|
||||
adm, audio_encoder_factory, audio_decoder_factory,
|
||||
audio_device_module, audio_encoder_factory, audio_decoder_factory,
|
||||
std::move(video_encoder_factory), std::move(video_decoder_factory),
|
||||
audio_mixer, audio_processor));
|
||||
}
|
||||
@ -335,6 +313,7 @@ static jlong JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
|
||||
const JavaParamRef<jclass>&,
|
||||
const JavaParamRef<jobject>& jcontext,
|
||||
const JavaParamRef<jobject>& joptions,
|
||||
jlong native_audio_device_module,
|
||||
const JavaParamRef<jobject>& jencoder_factory,
|
||||
const JavaParamRef<jobject>& jdecoder_factory,
|
||||
jlong native_audio_processor,
|
||||
@ -345,7 +324,9 @@ static jlong JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
|
||||
reinterpret_cast<FecControllerFactoryInterface*>(
|
||||
native_fec_controller_factory));
|
||||
return CreatePeerConnectionFactoryForJava(
|
||||
jni, jcontext, joptions, jencoder_factory, jdecoder_factory,
|
||||
jni, jcontext, joptions,
|
||||
reinterpret_cast<AudioDeviceModule*>(native_audio_device_module),
|
||||
jencoder_factory, jdecoder_factory,
|
||||
audio_processor ? audio_processor : CreateAudioProcessing(),
|
||||
std::move(fec_controller_factory));
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user