Add Java support for AudioProcessing and PostProcessing injection

This allows injection of a user-defined post-processing module from
the Android layer.

Bug: webrtc:8163
Change-Id: If3a6b4726c34c5f82d186b8cf95373c283cbd3f6
Reviewed-on: https://webrtc-review.googlesource.com/7610
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Taylor Brandstetter <deadbeef@webrtc.org>
Commit-Queue: Sam Zackrisson <saza@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20367}
Authored by Sam Zackrisson on 2017-10-20 09:36:16 +02:00, committed by Commit Bot
commit 6f38d25f11, parent 6f72f56b6c
14 changed files with 260 additions and 15 deletions
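Note: the Java-side files added by this change are not visible in the hunks below. As a purely hypothetical sketch of what "injection of a user-defined post-processing module from the Android layer" involves in practice (the package, class, and library names here are illustrative and not part of this change), the app first exposes its native webrtc::PostProcessing instance to Java as an opaque pointer through its own JNI bridge:

package org.example.audio;

/**
 * Hypothetical app-side bridge (not part of this change). The post-processing
 * module itself is implemented in the app's native code as a subclass of
 * webrtc::PostProcessing; Java only sees it as an opaque pointer value.
 */
public class MyPostProcessorFactory {
  static {
    // Assumption: the app ships its own shared library containing the JNI
    // implementation of createNativePostProcessor().
    System.loadLibrary("my_audio_effects");
  }

  /**
   * Implemented in the app's JNI code; returns a heap-allocated
   * webrtc::PostProcessing* as a long. Ownership is transferred to WebRTC once
   * the pointer is handed to the factory entry points added in this change.
   */
  public static native long createNativePostProcessor();
}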

@@ -12,6 +12,7 @@
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "modules/audio_processing/include/audio_processing.h"
namespace webrtc {
namespace jni {
@@ -24,5 +25,9 @@ rtc::scoped_refptr<AudioEncoderFactory> CreateAudioEncoderFactory() {
return CreateBuiltinAudioEncoderFactory();
}
rtc::scoped_refptr<AudioProcessing> CreateAudioProcessing() {
return AudioProcessing::Create();
}
} // namespace jni
} // namespace webrtc

@@ -15,6 +15,7 @@
// We don't want this target to depend on audio-related targets
#include "api/audio_codecs/audio_decoder_factory.h" // nogncheck
#include "api/audio_codecs/audio_encoder_factory.h" // nogncheck
#include "modules/audio_processing/include/audio_processing.h" // nogncheck
#include "rtc_base/scoped_ref_ptr.h"
namespace webrtc {
@@ -24,6 +25,8 @@ rtc::scoped_refptr<AudioDecoderFactory> CreateAudioDecoderFactory();
rtc::scoped_refptr<AudioEncoderFactory> CreateAudioEncoderFactory();
rtc::scoped_refptr<AudioProcessing> CreateAudioProcessing();
} // namespace jni
} // namespace webrtc

@@ -0,0 +1,36 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "modules/audio_processing/include/audio_processing.h"
#include "rtc_base/scoped_ref_ptr.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(
jlong,
DefaultAudioProcessingFactory_nativeCreateAudioProcessing,
JNIEnv*,
jclass,
jlong native_post_processor) {
std::unique_ptr<PostProcessing> post_processor(
reinterpret_cast<PostProcessing*>(native_post_processor));
rtc::scoped_refptr<AudioProcessing> audio_processing =
AudioProcessing::Create(webrtc::Config(), std::move(post_processor),
nullptr /* echo_control_factory */,
nullptr /* beamformer */);
return jlongFromPointer(audio_processing.release());
}
} // namespace jni
} // namespace webrtc
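For reference, a hedged sketch of the Java counterpart implied by the JNI function above; the actual org.webrtc Java file added by this commit is not shown in these hunks, so the public wrapper below is an assumption based on the JNI naming convention (Java_org_webrtc_DefaultAudioProcessingFactory_nativeCreateAudioProcessing):

package org.webrtc;

public class DefaultAudioProcessingFactory {
  /**
   * Wraps a native webrtc::PostProcessing* into a webrtc::AudioProcessing* and
   * returns it as a raw pointer value. Passing 0 creates an AudioProcessing
   * without an injected post processor; a non-zero pointer transfers ownership
   * of the post processor to the returned AudioProcessing (see the
   * std::unique_ptr handoff in the JNI function above).
   */
  public static long createNative(long nativePostProcessor) {
    return nativeCreateAudioProcessing(nativePostProcessor);
  }

  // Matches DefaultAudioProcessingFactory_nativeCreateAudioProcessing above.
  private static native long nativeCreateAudioProcessing(long nativePostProcessor);
}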

@@ -31,10 +31,11 @@ cricket::MediaEngineInterface* CreateMediaEngine(
const rtc::scoped_refptr<AudioDecoderFactory>& audio_decoder_factory,
cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
cricket::WebRtcVideoDecoderFactory* video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer) {
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processor) {
return cricket::WebRtcMediaEngineFactory::Create(
adm, audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
video_decoder_factory, audio_mixer, AudioProcessing::Create());
video_decoder_factory, audio_mixer, audio_processor);
}
} // namespace jni

@@ -20,6 +20,7 @@ class AudioEncoderFactory;
class AudioDecoderFactory;
class RtcEventLogFactoryInterface;
class AudioMixer;
class AudioProcessing;
} // namespace webrtc
namespace cricket {
@@ -40,7 +41,8 @@ cricket::MediaEngineInterface* CreateMediaEngine(
const rtc::scoped_refptr<AudioDecoderFactory>& audio_decoder_factory,
cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
cricket::WebRtcVideoDecoderFactory* video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer);
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processor);
} // namespace jni
} // namespace webrtc

@@ -21,5 +21,9 @@ rtc::scoped_refptr<AudioEncoderFactory> CreateAudioEncoderFactory() {
return nullptr;
}
rtc::scoped_refptr<AudioProcessing> CreateAudioProcessing() {
return nullptr;
}
} // namespace jni
} // namespace webrtc

@@ -27,7 +27,8 @@ cricket::MediaEngineInterface* CreateMediaEngine(
const rtc::scoped_refptr<AudioDecoderFactory>& audio_decoder_factory,
cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
cricket::WebRtcVideoDecoderFactory* video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer) {
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processor) {
return nullptr;
}

@@ -14,6 +14,9 @@
#include "api/peerconnectioninterface.h"
#include "media/base/mediaengine.h"
#include "modules/utility/include/jvm_android.h"
// We don't depend on the audio processing module implementation.
// The user may pass in a nullptr.
#include "modules/audio_processing/include/audio_processing.h" // nogncheck
#include "rtc_base/event_tracer.h"
#include "rtc_base/stringutils.h"
#include "rtc_base/thread.h"
@@ -132,14 +135,12 @@ JNI_FUNCTION_DECLARATION(void,
rtc::tracing::ShutdownInternalTracer();
}
JNI_FUNCTION_DECLARATION(
jlong,
PeerConnectionFactory_nativeCreatePeerConnectionFactory,
jlong CreatePeerConnectionFactoryForJava(
JNIEnv* jni,
jclass,
jobject joptions,
jobject jencoder_factory,
jobject jdecoder_factory) {
jobject jdecoder_factory,
rtc::scoped_refptr<AudioProcessing> audio_processor) {
// talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
// ThreadManager only WrapCurrentThread()s the thread where it is first
// created. Since the semantics around when auto-wrapping happens in
@@ -190,7 +191,7 @@ JNI_FUNCTION_DECLARATION(
CreateRtcEventLogFactory());
std::unique_ptr<cricket::MediaEngineInterface> media_engine(CreateMediaEngine(
adm, audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
video_decoder_factory, audio_mixer));
video_decoder_factory, audio_mixer, audio_processor));
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
CreateModularPeerConnectionFactory(
@@ -212,6 +213,35 @@ JNI_FUNCTION_DECLARATION(
return jlongFromPointer(owned_factory);
}
JNI_FUNCTION_DECLARATION(
jlong,
PeerConnectionFactory_nativeCreatePeerConnectionFactory,
JNIEnv* jni,
jclass,
jobject joptions,
jobject jencoder_factory,
jobject jdecoder_factory) {
return CreatePeerConnectionFactoryForJava(jni, joptions, jencoder_factory,
jdecoder_factory,
CreateAudioProcessing());
}
JNI_FUNCTION_DECLARATION(
jlong,
PeerConnectionFactory_nativeCreatePeerConnectionFactoryWithAudioProcessing,
JNIEnv* jni,
jclass,
jobject joptions,
jobject jencoder_factory,
jobject jdecoder_factory,
jlong native_audio_processor) {
rtc::scoped_refptr<AudioProcessing> audio_processor =
reinterpret_cast<AudioProcessing*>(native_audio_processor);
RTC_DCHECK(audio_processor);
return CreatePeerConnectionFactoryForJava(jni, joptions, jencoder_factory,
jdecoder_factory, audio_processor);
}
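The Java declarations that reach these two entry points are not part of the hunks shown here. A hedged sketch of what PeerConnectionFactory.java presumably gains; only the native method name and its arity follow directly from the JNI symbols above, while the non-native parameter types are assumptions that mirror the existing creation path:

// Sketch: presumed additions inside org.webrtc.PeerConnectionFactory (a
// fragment, not a standalone class). VideoEncoderFactory/VideoDecoderFactory
// parameter types are assumptions.
private static native long nativeCreatePeerConnectionFactoryWithAudioProcessing(
    Options options,
    VideoEncoderFactory encoderFactory,
    VideoDecoderFactory decoderFactory,
    long nativeAudioProcessor);

// Possible wiring: use a webrtc::AudioProcessing* produced by
// DefaultAudioProcessingFactory.createNative() (sketched earlier) when one is
// supplied, otherwise fall back to the existing path, which lets the native
// layer call CreateAudioProcessing() itself. The native side DCHECKs that the
// injected pointer is non-null, hence the explicit branch here.
private static long createNativeFactory(
    Options options,
    VideoEncoderFactory encoderFactory,
    VideoDecoderFactory decoderFactory,
    long nativeAudioProcessor) {
  if (nativeAudioProcessor == 0) {
    return nativeCreatePeerConnectionFactory(options, encoderFactory, decoderFactory);
  }
  return nativeCreatePeerConnectionFactoryWithAudioProcessing(
      options, encoderFactory, decoderFactory, nativeAudioProcessor);
}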
JNI_FUNCTION_DECLARATION(void,
PeerConnectionFactory_nativeFreeFactory,
JNIEnv*,