Revert "Reland "Delete old Android ADM.""
This reverts commit 6e4d7e606c4327eaa9298193e22794fcb9b30218.

Reason for revert: Still breaks downstream build (though in a different way this time)

Original change's description:
> Reland "Delete old Android ADM."
>
> This is a reland of commit 4ec3e9c98873520b3171d40ab0426b2f05edbbd2
>
> Original change's description:
> > Delete old Android ADM.
> >
> > The schedule move Android ADM code to sdk directory have been around
> > for several years, but the old code still not delete.
> >
> > Bug: webrtc:7452
> > Change-Id: I0f75c680f71f0b2ce614de6cbd9f124c2a59d453
> > Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/264620
> > Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
> > Commit-Queue: Henrik Andreassson <henrika@webrtc.org>
> > Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
> > Cr-Commit-Position: refs/heads/main@{#37174}
>
> Bug: webrtc:7452
> Change-Id: Icabad23e72c8258a854b7809a93811161517266c
> Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/265872
> Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
> Commit-Queue: Björn Terelius <terelius@webrtc.org>
> Cr-Commit-Position: refs/heads/main@{#37236}

Bug: webrtc:7452
Change-Id: Ide8fbd55fadd7aed9989053afff7c63c04f1320f
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/266023
Bot-Commit: rubber-stamper@appspot.gserviceaccount.com <rubber-stamper@appspot.gserviceaccount.com>
Commit-Queue: Björn Terelius <terelius@webrtc.org>
Owners-Override: Björn Terelius <terelius@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#37242}
commit 38a28603fd
parent 7517fb639b
committed by WebRTC LUCI CQ
@@ -13,7 +13,7 @@
 #include "rtc_base/ip_address.h"
 #include "rtc_base/logging.h"
 #include "rtc_base/thread.h"
-#include "sdk/android/native_api/jni/application_context_provider.h"
+#include "sdk/android/native_unittests/application_context_provider.h"
 #include "sdk/android/src/jni/jni_helpers.h"
 #include "test/gtest.h"
 #include "test/scoped_key_value_config.h"
@@ -47,7 +47,7 @@ class AndroidNetworkMonitorTest : public ::testing::Test {
  public:
   AndroidNetworkMonitorTest() {
     JNIEnv* env = AttachCurrentThreadIfNeeded();
-    ScopedJavaLocalRef<jobject> context = GetAppContext(env);
+    ScopedJavaLocalRef<jobject> context = test::GetAppContextForTest(env);
     network_monitor_ = std::make_unique<jni::AndroidNetworkMonitor>(
         env, context, field_trials_);
   }

sdk/android/native_unittests/application_context_provider.cc (new file, 24 lines)
@@ -0,0 +1,24 @@
+/*
+ *  Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/native_unittests/application_context_provider.h"
+
+#include "sdk/android/generated_native_unittests_jni/ApplicationContextProvider_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace test {
+
+ScopedJavaLocalRef<jobject> GetAppContextForTest(JNIEnv* jni) {
+  return ScopedJavaLocalRef<jobject>(
+      jni::Java_ApplicationContextProvider_getApplicationContextForTest(jni));
+}
+
+}  // namespace test
+}  // namespace webrtc

sdk/android/native_unittests/application_context_provider.h (new file, 23 lines)
@@ -0,0 +1,23 @@
+/*
+ *  Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef SDK_ANDROID_NATIVE_UNITTESTS_APPLICATION_CONTEXT_PROVIDER_H_
+#define SDK_ANDROID_NATIVE_UNITTESTS_APPLICATION_CONTEXT_PROVIDER_H_
+
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace test {
+
+ScopedJavaLocalRef<jobject> GetAppContextForTest(JNIEnv* jni);
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // SDK_ANDROID_NATIVE_UNITTESTS_APPLICATION_CONTEXT_PROVIDER_H_
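
For orientation, a minimal sketch of how the restored helper is used by the native tests touched below; the wrapping function name is hypothetical, while AttachCurrentThreadIfNeeded, ScopedJavaLocalRef and test::GetAppContextForTest all appear in the hunks of this change.

// Illustration only, not part of the change; the function name is made up.
#include "sdk/android/native_unittests/application_context_provider.h"
#include "sdk/android/src/jni/jni_helpers.h"

namespace webrtc {

void UseTestAppContext() {
  JNIEnv* env = AttachCurrentThreadIfNeeded();
  // Wraps the Android application Context fetched through the generated
  // ApplicationContextProvider JNI bindings.
  ScopedJavaLocalRef<jobject> context = test::GetAppContextForTest(env);
  // The local ref can then be handed to the component under test, e.g.
  // jni::AndroidNetworkMonitor or CreateJavaAudioDeviceModule(env, context.obj()).
}

}  // namespace webrtc
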
@@ -22,7 +22,7 @@
 #include "rtc_base/time_utils.h"
 #include "sdk/android/generated_native_unittests_jni/BuildInfo_jni.h"
 #include "sdk/android/native_api/audio_device_module/audio_device_android.h"
-#include "sdk/android/native_api/jni/application_context_provider.h"
+#include "sdk/android/native_unittests/application_context_provider.h"
 #include "sdk/android/src/jni/audio_device/audio_common.h"
 #include "sdk/android/src/jni/audio_device/audio_device_module.h"
 #include "sdk/android/src/jni/audio_device/opensles_common.h"
@@ -466,7 +466,7 @@ class AudioDeviceTest : public ::testing::Test {
     // implementations.
     // Creates an audio device using a default audio layer.
     jni_ = AttachCurrentThreadIfNeeded();
-    context_ = GetAppContext(jni_);
+    context_ = test::GetAppContextForTest(jni_);
     audio_device_ = CreateJavaAudioDeviceModule(jni_, context_.obj());
     EXPECT_NE(audio_device_.get(), nullptr);
     EXPECT_EQ(0, audio_device_->Init());
@@ -491,7 +491,7 @@ class AudioDeviceTest : public ::testing::Test {
   }

   void SetActiveAudioLayer(AudioDeviceModule::AudioLayer audio_layer) {
-    audio_device_ = CreateAndroidAudioDeviceModule(audio_layer);
+    audio_device_ = CreateAudioDevice(audio_layer);
     EXPECT_NE(audio_device_.get(), nullptr);
     EXPECT_EQ(0, audio_device_->Init());
     UpdateParameters();
@@ -512,6 +512,30 @@ class AudioDeviceTest : public ::testing::Test {
     return audio_device_;
   }

+  rtc::scoped_refptr<AudioDeviceModule> CreateAudioDevice(
+      AudioDeviceModule::AudioLayer audio_layer) {
+#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
+    if (audio_layer == AudioDeviceModule::kAndroidAAudioAudio) {
+      return rtc::scoped_refptr<AudioDeviceModule>(
+          CreateAAudioAudioDeviceModule(jni_, context_.obj()));
+    }
+#endif
+    if (audio_layer == AudioDeviceModule::kAndroidJavaAudio) {
+      return rtc::scoped_refptr<AudioDeviceModule>(
+          CreateJavaAudioDeviceModule(jni_, context_.obj()));
+    } else if (audio_layer == AudioDeviceModule::kAndroidOpenSLESAudio) {
+      return rtc::scoped_refptr<AudioDeviceModule>(
+          CreateOpenSLESAudioDeviceModule(jni_, context_.obj()));
+    } else if (audio_layer ==
+               AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio) {
+      return rtc::scoped_refptr<AudioDeviceModule>(
+          CreateJavaInputAndOpenSLESOutputAudioDeviceModule(jni_,
+                                                            context_.obj()));
+    } else {
+      return nullptr;
+    }
+  }
+
   // Returns file name relative to the resource root given a sample rate.
   std::string GetFileName(int sample_rate) {
     EXPECT_TRUE(sample_rate == 48000 || sample_rate == 44100);
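
A hedged illustration of how the re-added CreateAudioDevice helper can be driven from a test; the test name and the Terminate call are assumptions, while CreateAudioDevice, kAndroidOpenSLESAudio and the Init expectation come from the hunks above.

// Hypothetical test body, for illustration only.
TEST_F(AudioDeviceTest, ExampleExplicitOpenSLESLayer) {
  rtc::scoped_refptr<AudioDeviceModule> adm =
      CreateAudioDevice(AudioDeviceModule::kAndroidOpenSLESAudio);
  ASSERT_NE(adm.get(), nullptr);   // The helper returns nullptr for unknown layers.
  EXPECT_EQ(0, adm->Init());       // Same expectation as in SetActiveAudioLayer().
  EXPECT_EQ(0, adm->Terminate());  // Assumed cleanup via the ADM interface.
}
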
@@ -542,7 +566,7 @@ class AudioDeviceTest : public ::testing::Test {
   int TestDelayOnAudioLayer(
       const AudioDeviceModule::AudioLayer& layer_to_test) {
     rtc::scoped_refptr<AudioDeviceModule> audio_device;
-    audio_device = CreateAndroidAudioDeviceModule(layer_to_test);
+    audio_device = CreateAudioDevice(layer_to_test);
     EXPECT_NE(audio_device.get(), nullptr);
     uint16_t playout_delay;
     EXPECT_EQ(0, audio_device->PlayoutDelay(&playout_delay));
@@ -552,7 +576,7 @@ class AudioDeviceTest : public ::testing::Test {
   AudioDeviceModule::AudioLayer TestActiveAudioLayer(
       const AudioDeviceModule::AudioLayer& layer_to_test) {
     rtc::scoped_refptr<AudioDeviceModule> audio_device;
-    audio_device = CreateAndroidAudioDeviceModule(layer_to_test);
+    audio_device = CreateAudioDevice(layer_to_test);
     EXPECT_NE(audio_device.get(), nullptr);
     AudioDeviceModule::AudioLayer active;
     EXPECT_EQ(0, audio_device->ActiveAudioLayer(&active));
@@ -650,22 +674,6 @@ class AudioDeviceTest : public ::testing::Test {
     return volume;
   }

-  bool IsLowLatencyPlayoutSupported() {
-    return jni::IsLowLatencyInputSupported(jni_, context_);
-  }
-
-  bool IsLowLatencyRecordSupported() {
-    return jni::IsLowLatencyOutputSupported(jni_, context_);
-  }
-
-  bool IsAAudioSupported() {
-#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
-    return true;
-#else
-    return false;
-#endif
-  }
-
   JNIEnv* jni_;
   ScopedJavaLocalRef<jobject> context_;
   rtc::Event test_is_done_;
@@ -679,31 +687,6 @@ TEST_F(AudioDeviceTest, ConstructDestruct) {
   // Using the test fixture to create and destruct the audio device module.
 }

-// We always ask for a default audio layer when the ADM is constructed. But the
-// ADM will then internally set the best suitable combination of audio layers,
-// for input and output based on if low-latency output and/or input audio in
-// combination with OpenSL ES is supported or not. This test ensures that the
-// correct selection is done.
-TEST_F(AudioDeviceTest, VerifyDefaultAudioLayer) {
-  const AudioDeviceModule::AudioLayer audio_layer =
-      TestActiveAudioLayer(AudioDeviceModule::kPlatformDefaultAudio);
-  bool low_latency_output = IsLowLatencyPlayoutSupported();
-  bool low_latency_input = IsLowLatencyRecordSupported();
-  bool aaudio = IsAAudioSupported();
-  AudioDeviceModule::AudioLayer expected_audio_layer;
-  if (aaudio) {
-    expected_audio_layer = AudioDeviceModule::kAndroidAAudioAudio;
-  } else if (low_latency_output && low_latency_input) {
-    expected_audio_layer = AudioDeviceModule::kAndroidOpenSLESAudio;
-  } else if (low_latency_output && !low_latency_input) {
-    expected_audio_layer =
-        AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio;
-  } else {
-    expected_audio_layer = AudioDeviceModule::kAndroidJavaAudio;
-  }
-  EXPECT_EQ(expected_audio_layer, audio_layer);
-}
-
 // Verify that it is possible to explicitly create the two types of supported
 // ADMs. These two tests overrides the default selection of native audio layer
 // by ignoring if the device supports low-latency output or not.
@@ -731,18 +714,15 @@ TEST_F(AudioDeviceTest, CorrectAudioLayerIsUsedForOpenSLInBothDirections) {
   EXPECT_EQ(expected_layer, active_layer);
 }

+// TODO(bugs.webrtc.org/8914)
+// TODO(phensman): Add test for AAudio/Java combination when this combination
+// is supported.
 #if !defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
 #define MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections \
   DISABLED_CorrectAudioLayerIsUsedForAAudioInBothDirections
-
-#define MAYBE_CorrectAudioLayerIsUsedForCombinedJavaAAudioCombo \
-  DISABLED_CorrectAudioLayerIsUsedForCombinedJavaAAudioCombo
 #else
 #define MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections \
   CorrectAudioLayerIsUsedForAAudioInBothDirections
-
-#define MAYBE_CorrectAudioLayerIsUsedForCombinedJavaAAudioCombo \
-  CorrectAudioLayerIsUsedForCombinedJavaAAudioCombo
 #endif
 TEST_F(AudioDeviceTest,
        MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections) {
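
For readers unfamiliar with the MAYBE_ indirection in the hunk above, a generic sketch of the pattern follows; the feature macro and test name are made up. When the feature is compiled out, the macro renames the test with gtest's DISABLED_ prefix so it still builds but is skipped by default.

// Generic illustration of the MAYBE_/DISABLED_ pattern; SOME_FEATURE and the
// test name are hypothetical.
#if !defined(SOME_FEATURE)
#define MAYBE_FeatureDependentTest DISABLED_FeatureDependentTest
#else
#define MAYBE_FeatureDependentTest FeatureDependentTest
#endif
TEST_F(AudioDeviceTest, MAYBE_FeatureDependentTest) {
  // Runs only when SOME_FEATURE is compiled in; otherwise gtest registers the
  // test as DISABLED_ and skips it by default.
}
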
@@ -753,15 +733,6 @@ TEST_F(AudioDeviceTest,
   EXPECT_EQ(expected_layer, active_layer);
 }

-TEST_F(AudioDeviceTest,
-       MAYBE_CorrectAudioLayerIsUsedForCombinedJavaAAudioCombo) {
-  AudioDeviceModule::AudioLayer expected_layer =
-      AudioDeviceModule::kAndroidJavaInputAndAAudioOutputAudio;
-  AudioDeviceModule::AudioLayer active_layer =
-      TestActiveAudioLayer(expected_layer);
-  EXPECT_EQ(expected_layer, active_layer);
-}
-
 // The Android ADM supports two different delay reporting modes. One for the
 // low-latency output path (in combination with OpenSL ES), and one for the
 // high-latency output path (Java backends in both directions). These two tests
@@ -1158,7 +1129,7 @@ TEST_F(AudioDeviceTest, DISABLED_MeasureLoopbackLatency) {

 TEST(JavaAudioDeviceTest, TestRunningTwoAdmsSimultaneously) {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  ScopedJavaLocalRef<jobject> context = GetAppContext(jni);
+  ScopedJavaLocalRef<jobject> context = test::GetAppContextForTest(jni);

   // Create and start the first ADM.
   rtc::scoped_refptr<AudioDeviceModule> adm_1 =

@@ -0,0 +1,20 @@
+/*
+ *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+public class ApplicationContextProvider {
+  @CalledByNative
+  public static Context getApplicationContextForTest() {
+    return ContextUtils.getApplicationContext();
+  }
+}

@@ -24,7 +24,7 @@
 #include "sdk/android/generated_native_unittests_jni/PeerConnectionFactoryInitializationHelper_jni.h"
 #include "sdk/android/native_api/audio_device_module/audio_device_android.h"
 #include "sdk/android/native_api/jni/jvm.h"
-#include "sdk/android/native_api/jni/application_context_provider.h"
+#include "sdk/android/native_unittests/application_context_provider.h"
 #include "sdk/android/src/jni/jni_helpers.h"
 #include "test/gtest.h"

@@ -57,7 +57,7 @@ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> CreateTestPCF(
   cricket::MediaEngineDependencies media_deps;
   media_deps.task_queue_factory = pcf_deps.task_queue_factory.get();
   media_deps.adm =
-      CreateJavaAudioDeviceModule(jni, GetAppContext(jni).obj());
+      CreateJavaAudioDeviceModule(jni, GetAppContextForTest(jni).obj());
   media_deps.video_encoder_factory =
       std::make_unique<webrtc::InternalEncoderFactory>();
   media_deps.video_decoder_factory =