Adding playout volume control to WebRtcAudioTrack.java.
Also adds a framework for an AudioManager to be used by both sides (playout and recording). This initial implementation only performs very simple tasks, such as setting up the correct audio mode (needed for correct volume behavior). Note that this CL is mainly about modifying the volume; the added AudioManager is only a placeholder for future work. I could have done the same parts in the WebRtcAudioTrack class, but feel that it is better to move them into an AudioManager already at this stage. The AudioManager supports Init(), where the actual audio changes are made (setting the audio mode etc.), but it can also be used as a simple "construct-and-store-audio-parameters" unit, which is the case here. Hence, the AudioManager now serves as the central point for acquiring audio parameters and injecting them into the playout and recording sides; previously, both sides acquired their own parameters, which was more error prone. A sketch of this wiring follows below.

BUG=NONE
TEST=AudioDeviceTest
R=perkj@webrtc.org, phoglund@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/45829004

Cr-Commit-Position: refs/heads/master@{#8875}
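To make the "single source of audio parameters" point concrete, here is a minimal C++ sketch (not part of this CL) of how an audio device module could pull both parameter sets from one AudioManager (declared in the header added below) and hand them to the playout and recording sides. The PlayoutSide/RecordSide types and their SetAudioParameters() methods are hypothetical stand-ins for the real consumers.

#include "webrtc/modules/audio_device/android/audio_manager.h"

// Hypothetical consumers of the cached parameters; the real playout and
// recording implementations live elsewhere in the audio_device module.
struct PlayoutSide {
  void SetAudioParameters(const webrtc::AudioParameters& params) { params_ = params; }
  webrtc::AudioParameters params_;
};
struct RecordSide {
  void SetAudioParameters(const webrtc::AudioParameters& params) { params_ = params; }
  webrtc::AudioParameters params_;
};

void WireUpAudioSides(webrtc::AudioManager* audio_manager,
                      PlayoutSide* playout,
                      RecordSide* record) {
  // Both sides receive parameters from the same AudioManager instance instead
  // of querying the Java layer independently.
  playout->SetAudioParameters(audio_manager->GetPlayoutAudioParameters());
  record->SetAudioParameters(audio_manager->GetRecordAudioParameters());
}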
webrtc/modules/audio_device/android/audio_manager.h (new file, 133 lines)

@@ -0,0 +1,133 @@
/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_

#include <jni.h>

#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/audio_device/android/audio_common.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
#include "webrtc/modules/utility/interface/helpers_android.h"

namespace webrtc {

class AudioParameters {
 public:
  enum { kBitsPerSample = 16 };
  AudioParameters()
      : sample_rate_(0),
        channels_(0),
        frames_per_buffer_(0),
        bits_per_sample_(kBitsPerSample) {}
  AudioParameters(int sample_rate, int channels)
      : sample_rate_(sample_rate),
        channels_(channels),
        frames_per_buffer_(sample_rate / 100),
        bits_per_sample_(kBitsPerSample) {}
  void reset(int sample_rate, int channels) {
    sample_rate_ = sample_rate;
    channels_ = channels;
    // WebRTC uses a fixed buffer size equal to 10ms.
    frames_per_buffer_ = (sample_rate / 100);
  }
  int sample_rate() const { return sample_rate_; }
  int channels() const { return channels_; }
  int frames_per_buffer() const { return frames_per_buffer_; }
  bool is_valid() const {
    return ((sample_rate_ > 0) && (channels_ > 0) && (frames_per_buffer_ > 0));
  }
  int GetBytesPerFrame() const { return channels_ * bits_per_sample_ / 8; }
  int GetBytesPerBuffer() const {
    return frames_per_buffer_ * GetBytesPerFrame();
  }

 private:
  int sample_rate_;
  int channels_;
  int frames_per_buffer_;
  const int bits_per_sample_;
};
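As a quick sanity check of the arithmetic above (not part of the patch): with the WebRTC-wide 10 ms buffer convention, a 48 kHz stereo stream yields 480 frames per buffer and, at 16 bits per sample, 4 bytes per frame and 1920 bytes per buffer.

#include <cassert>
#include "webrtc/modules/audio_device/android/audio_manager.h"

void CheckAudioParameterMath() {
  webrtc::AudioParameters params(48000, 2);     // 48 kHz, stereo.
  assert(params.frames_per_buffer() == 480);    // 48000 / 100, i.e. 10 ms.
  assert(params.GetBytesPerFrame() == 4);       // 2 channels * 16 bits / 8.
  assert(params.GetBytesPerBuffer() == 1920);   // 480 frames * 4 bytes.
}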
// Implements support for functions in the WebRTC audio stack for Android that
// relies on the AudioManager in android.media. It also populates an
// AudioParameter structure with native audio parameters detected at
// construction. This class does not make any audio-related modifications
// unless Init() is called. Caching audio parameters makes no changes but only
// reads data from the Java side.
// TODO(henrika): expand this class when adding support for low-latency
// OpenSL ES. Currently, it only contains very basic functionality.
class AudioManager {
 public:
  // Use the invocation API to allow the native application to use the JNI
  // interface pointer to access VM features. |jvm| denotes the Java VM and
  // |context| corresponds to android.content.Context in Java.
  // This method also sets a global jclass object, |g_audio_manager_class| for
  // the "org/webrtc/voiceengine/WebRtcAudioManager"-class.
  static void SetAndroidAudioDeviceObjects(void* jvm, void* context);
  // Always call this method after the object has been destructed. It deletes
  // existing global references and enables garbage collection.
  static void ClearAndroidAudioDeviceObjects();

  AudioManager();
  ~AudioManager();

  // Initializes the audio manager (changes mode to MODE_IN_COMMUNICATION,
  // request audio focus etc.).
  // It is possible to use this class without calling Init() if the calling
  // application prefers to set up the audio environment on its own instead.
  bool Init();
  // Revert any setting done by Init().
  bool Close();

  // Native audio parameters stored during construction.
  AudioParameters GetPlayoutAudioParameters() const;
  AudioParameters GetRecordAudioParameters() const;

  bool initialized() const { return initialized_; }

 private:
  // Called from Java side so we can cache the native audio parameters.
  // This method will be called by the WebRtcAudioManager constructor, i.e.
  // on the same thread that this object is created on.
  static void JNICALL CacheAudioParameters(JNIEnv* env, jobject obj,
      jint sample_rate, jint channels, jlong nativeAudioManager);
  void OnCacheAudioParameters(JNIEnv* env, jint sample_rate, jint channels);

  // Returns true if SetAndroidAudioDeviceObjects() has been called
  // successfully.
  bool HasDeviceObjects();

  // Called from the constructor. Defines the |j_audio_manager_| member.
  void CreateJavaInstance();

  // Stores thread ID in the constructor.
  // We can then use ThreadChecker::CalledOnValidThread() to ensure that
  // other methods are called from the same thread.
  rtc::ThreadChecker thread_checker_;

  // The Java WebRtcAudioManager instance.
  jobject j_audio_manager_;

  // Set to true by Init() and false by Close().
  bool initialized_;

  // Contains native parameters (e.g. sample rate, channel configuration).
  // Set at construction in OnCacheAudioParameters() which is called from
  // Java on the same thread as this object is created on.
  AudioParameters playout_parameters_;
  AudioParameters record_parameters_;
};

}  // namespace webrtc

#endif  // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_
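For reference, here is a minimal sketch of the call sequence suggested by the comments in the header above (not part of the patch; error handling omitted, and |jvm|/|context| are assumed to come from the embedding Android application):

#include "webrtc/modules/audio_device/android/audio_manager.h"

void AudioManagerLifecycleSketch(void* jvm, void* context) {
  // Must be done once before any AudioManager instance is created.
  webrtc::AudioManager::SetAndroidAudioDeviceObjects(jvm, context);
  {
    webrtc::AudioManager audio_manager;  // Caches native parameters via JNI.
    webrtc::AudioParameters playout = audio_manager.GetPlayoutAudioParameters();
    webrtc::AudioParameters record = audio_manager.GetRecordAudioParameters();
    if (playout.is_valid() && record.is_valid()) {
      audio_manager.Init();   // Sets MODE_IN_COMMUNICATION etc.
      // ... start playout/recording using the cached parameters ...
      audio_manager.Close();  // Reverts the settings made by Init().
    }
  }
  // Call after the object has been destructed.
  webrtc::AudioManager::ClearAndroidAudioDeviceObjects();
}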