
Also adds a framework for an AudioManager to be used by both sides (playout and recording). This initial implementation only does very simple tasks like setting up the correct audio mode (needed for correct volume behavior). Note that this CL is mainly about modifying the volume. The added AudioManager is only a placeholder for future work. I could have done the same parts in the WebRtcAudioTrack class but feel that it is better to move these parts to an AudioManager already at this stage. The AudioManager supports Init() where actual audio changes are done (set audio mode etc.) but it can also be used as a simple "construct-and-store-audio-parameters" unit, which is the case here. Hence, the AM now serves as the center for getting audio parameters and then injecting these into the playout and recording sides. Previously, both sides acquired their own parameters and that is more error-prone. BUG=NONE TEST=AudioDeviceTest R=perkj@webrtc.org, phoglund@webrtc.org Review URL: https://webrtc-codereview.appspot.com/45829004 Cr-Commit-Position: refs/heads/master@{#8875}
130 lines
3.7 KiB
C++
130 lines
3.7 KiB
C++
/*
|
|
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
|
*
|
|
* Use of this source code is governed by a BSD-style license
|
|
* that can be found in the LICENSE file in the root of the source
|
|
* tree. An additional intellectual property rights grant can be found
|
|
* in the file PATENTS. All contributing project authors may
|
|
* be found in the AUTHORS file in the root of the source tree.
|
|
*/
|
|
|
|
#include <assert.h>
|
|
#include <jni.h>
|
|
|
|
#include "webrtc/base/scoped_ptr.h"
|
|
#include "webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.h"
|
|
#include "webrtc/modules/audio_device/android/audio_device_template.h"
|
|
#include "webrtc/modules/audio_device/android/audio_record_jni.h"
|
|
#include "webrtc/modules/audio_device/android/audio_track_jni.h"
|
|
#include "webrtc/modules/audio_device/android/opensles_input.h"
|
|
#include "webrtc/modules/audio_device/android/opensles_output.h"
|
|
|
|
// Java globals
// Cached JavaVM pointer; set once in JNI_OnLoad and handed to the audio
// device layer in RegisterApplicationContext().
static JavaVM* g_vm = NULL;
// Global reference to the Java org.webrtc.app.OpenSlRunner class; created in
// JNI_OnLoad and used when registering the native methods.
static jclass g_osr = NULL;
|
|
|
|
namespace webrtc {
|
|
|
|
template <class InputType, class OutputType>
|
|
class OpenSlRunnerTemplate {
|
|
public:
|
|
OpenSlRunnerTemplate()
|
|
: output_(NULL), // TODO(henrika): inject proper audio manager.
|
|
input_(&output_, NULL) {
|
|
output_.AttachAudioBuffer(&audio_buffer_);
|
|
if (output_.Init() != 0) {
|
|
assert(false);
|
|
}
|
|
if (output_.InitPlayout() != 0) {
|
|
assert(false);
|
|
}
|
|
input_.AttachAudioBuffer(&audio_buffer_);
|
|
if (input_.Init() != 0) {
|
|
assert(false);
|
|
}
|
|
if (input_.InitRecording() != 0) {
|
|
assert(false);
|
|
}
|
|
}
|
|
|
|
~OpenSlRunnerTemplate() {}
|
|
|
|
void StartPlayRecord() {
|
|
output_.StartPlayout();
|
|
input_.StartRecording();
|
|
}
|
|
|
|
void StopPlayRecord() {
|
|
// There are large enough buffers to compensate for recording and playing
|
|
// jitter such that the timing of stopping playing or recording should not
|
|
// result in over or underrun.
|
|
input_.StopRecording();
|
|
output_.StopPlayout();
|
|
audio_buffer_.ClearBuffer();
|
|
}
|
|
|
|
private:
|
|
OutputType output_;
|
|
InputType input_;
|
|
FakeAudioDeviceBuffer audio_buffer_;
|
|
};
|
|
|
|
class OpenSlRunner
|
|
: public OpenSlRunnerTemplate<OpenSlesInput, OpenSlesOutput> {
|
|
public:
|
|
// Global class implementing native code.
|
|
static OpenSlRunner* g_runner;
|
|
|
|
|
|
OpenSlRunner() {}
|
|
virtual ~OpenSlRunner() {}
|
|
|
|
static JNIEXPORT void JNICALL RegisterApplicationContext(
|
|
JNIEnv* env,
|
|
jobject obj,
|
|
jobject context) {
|
|
assert(!g_runner); // Should only be called once.
|
|
OpenSlesInput::SetAndroidAudioDeviceObjects(g_vm, context);
|
|
OpenSlesOutput::SetAndroidAudioDeviceObjects(g_vm, context);
|
|
g_runner = new OpenSlRunner();
|
|
}
|
|
|
|
static JNIEXPORT void JNICALL Start(JNIEnv * env, jobject) {
|
|
g_runner->StartPlayRecord();
|
|
}
|
|
|
|
static JNIEXPORT void JNICALL Stop(JNIEnv * env, jobject) {
|
|
g_runner->StopPlayRecord();
|
|
}
|
|
};
|
|
|
|
// Storage for the singleton declared in OpenSlRunner; remains NULL until
// RegisterApplicationContext() is invoked from Java.
OpenSlRunner* OpenSlRunner::g_runner = NULL;
|
|
|
|
} // namespace webrtc
|
|
|
|
// Called by the Android runtime when this shared library is loaded. Caches
// the JavaVM, resolves the Java peer class org.webrtc.app.OpenSlRunner and
// registers its native methods.
// Returns the JNI version this library requires, or -1 on failure.
jint JNI_OnLoad(JavaVM* vm, void* reserved) {
  // Only called once.
  assert(!g_vm);
  JNIEnv* env;
  if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
    return -1;
  }

  jclass local_osr = env->FindClass("org/webrtc/app/OpenSlRunner");
  assert(local_osr != NULL);
  // Promote to a global reference so the class stays valid after this call
  // returns; the local reference is released automatically.
  g_osr = static_cast<jclass>(env->NewGlobalRef(local_osr));
  JNINativeMethod nativeFunctions[] = {
      {"RegisterApplicationContext", "(Landroid/content/Context;)V",
       reinterpret_cast<void*>(
           &webrtc::OpenSlRunner::RegisterApplicationContext)},
      {"Start", "()V", reinterpret_cast<void*>(&webrtc::OpenSlRunner::Start)},
      {"Stop", "()V", reinterpret_cast<void*>(&webrtc::OpenSlRunner::Stop)}
  };
  // Derive the method count from the table itself so that adding or removing
  // an entry cannot silently desynchronize from a hard-coded constant.
  const jint num_methods =
      sizeof(nativeFunctions) / sizeof(nativeFunctions[0]);
  int ret_val = env->RegisterNatives(g_osr, nativeFunctions, num_methods);
  if (ret_val != 0) {
    assert(false);
  }
  g_vm = vm;
  return JNI_VERSION_1_6;
}
|