Remove checks for SDK <= 21
WebRTC’s minSdk is 21, so all those checks are dead code.

Change-Id: I26497fd92259b66d9e5ac6afbb393adf4d904c77
Bug: webrtc:13780
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/253124
Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
Reviewed-by: Linus Nilsson <lnilsson@webrtc.org>
Commit-Queue: Xavier Lepaul <xalep@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#36140}
Committed by: WebRTC LUCI CQ
Parent: 7befe8e5e4
Commit: 0f50cc2849
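To illustrate the pattern this change removes (a hedged sketch, not code taken from the diff below): with minSdkVersion 21, an SDK_INT guard for an API introduced at level 21 is always satisfied, so the fallback branch is dead code and the call can be made unconditionally. The class and method names below are illustrative only.

import android.media.AudioManager;
import android.os.Build;

// Hypothetical helper, for illustration only; not part of the WebRTC tree.
final class VolumeQueryExample {
  // Before: guarded, because AudioManager.isVolumeFixed() was added in API level 21.
  static boolean isVolumeFixedGuarded(AudioManager audioManager) {
    if (Build.VERSION.SDK_INT < 21) {
      return false; // dead branch once minSdk is 21
    }
    return audioManager.isVolumeFixed();
  }

  // After: minSdk 21 guarantees the API is present, so call it directly.
  static boolean isVolumeFixed(AudioManager audioManager) {
    return audioManager.isVolumeFixed();
  }
}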
@@ -149,16 +149,10 @@ public class AppRTCProximitySensor implements SensorEventListener {
info.append(", resolution: ").append(proximitySensor.getResolution());
info.append(", max range: ").append(proximitySensor.getMaximumRange());
info.append(", min delay: ").append(proximitySensor.getMinDelay());
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) {
// Added in API level 20.
info.append(", type: ").append(proximitySensor.getStringType());
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// Added in API level 21.
info.append(", max delay: ").append(proximitySensor.getMaxDelay());
info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
}
info.append(", type: ").append(proximitySensor.getStringType());
info.append(", max delay: ").append(proximitySensor.getMaxDelay());
info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
Log.d(TAG, info.toString());
}
}
@@ -384,7 +384,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
}
}

@TargetApi(17)
private DisplayMetrics getDisplayMetrics() {
DisplayMetrics displayMetrics = new DisplayMetrics();
WindowManager windowManager =

@@ -393,16 +392,11 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
return displayMetrics;
}

@TargetApi(19)
private static int getSystemUiVisibility() {
int flags = View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
flags |= View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
}
return flags;
return View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
}

@TargetApi(21)
private void startScreenCapture() {
MediaProjectionManager mediaProjectionManager =
(MediaProjectionManager) getApplication().getSystemService(

@@ -460,7 +454,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
return null;
}

@TargetApi(21)
private @Nullable VideoCapturer createScreenCapturer() {
if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
reportError("User didn't give permission to capture the screen.");
@@ -73,7 +73,6 @@ import java.util.concurrent.TimeUnit;
* correct value, and then returns to back to correct reading. Both when
* jumping up and back down we might create faulty CPU load readings.
*/
@TargetApi(Build.VERSION_CODES.KITKAT)
class CpuMonitor {
private static final String TAG = "CpuMonitor";
private static final int MOVING_AVERAGE_SAMPLES = 5;

@@ -159,8 +158,7 @@ class CpuMonitor {
}

public static boolean isSupported() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT
&& Build.VERSION.SDK_INT < Build.VERSION_CODES.N;
return Build.VERSION.SDK_INT < Build.VERSION_CODES.N;
}

public CpuMonitor(Context context) {
@@ -453,10 +453,6 @@ public class PeerConnectionClientTest implements PeerConnectionEvents {
@Test
@SmallTest
public void testLoopbackVp8DecodeToTexture() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
}

@@ -464,10 +460,6 @@ public class PeerConnectionClientTest implements PeerConnectionEvents {
@Test
@SmallTest
public void testLoopbackVp9DecodeToTexture() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
}

@@ -475,10 +467,6 @@ public class PeerConnectionClientTest implements PeerConnectionEvents {
@Test
@SmallTest
public void testLoopbackH264DecodeToTexture() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
}

@@ -486,10 +474,6 @@ public class PeerConnectionClientTest implements PeerConnectionEvents {
@Test
@SmallTest
public void testLoopbackVp8CaptureToTexture() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19");
return;
}
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
}

@@ -497,10 +481,6 @@ public class PeerConnectionClientTest implements PeerConnectionEvents {
@Test
@SmallTest
public void testLoopbackH264CaptureToTexture() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
Log.i(TAG, "Encode to textures is not supported. Requires KITKAT");
return;
}
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
}
@@ -95,8 +95,6 @@ public class WebRtcAudioEffects {
// Returns true if the platform AEC should be excluded based on its UUID.
// AudioEffect.queryEffects() can throw IllegalStateException.
private static boolean isAcousticEchoCancelerExcludedByUUID() {
if (Build.VERSION.SDK_INT < 18)
return false;
for (Descriptor d : getAvailableEffects()) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC)
&& d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {

@@ -109,8 +107,6 @@ public class WebRtcAudioEffects {
// Returns true if the platform NS should be excluded based on its UUID.
// AudioEffect.queryEffects() can throw IllegalStateException.
private static boolean isNoiseSuppressorExcludedByUUID() {
if (Build.VERSION.SDK_INT < 18)
return false;
for (Descriptor d : getAvailableEffects()) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
return true;

@@ -121,15 +117,11 @@ public class WebRtcAudioEffects {

// Returns true if the device supports Acoustic Echo Cancellation (AEC).
private static boolean isAcousticEchoCancelerEffectAvailable() {
if (Build.VERSION.SDK_INT < 18)
return false;
return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC);
}

// Returns true if the device supports Noise Suppression (NS).
private static boolean isNoiseSuppressorEffectAvailable() {
if (Build.VERSION.SDK_INT < 18)
return false;
return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS);
}

@@ -277,9 +269,6 @@ public class WebRtcAudioEffects {
// As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
// AutomaticGainControl.isAvailable() returns false.
private boolean effectTypeIsVoIP(UUID type) {
if (Build.VERSION.SDK_INT < 18)
return false;

return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
|| (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
}
@@ -258,7 +258,7 @@ public class WebRtcAudioManager {
// as well. The NDK doc states that: "As of API level 21, lower latency
// audio input is supported on select devices. To take advantage of this
// feature, first confirm that lower latency output is available".
return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported();
return isLowLatencyOutputSupported();
}

// Returns true if the device has professional audio level of functionality

@@ -301,9 +301,6 @@ public class WebRtcAudioManager {
}

private int getSampleRateForApiLevel() {
if (Build.VERSION.SDK_INT < 17) {
return WebRtcAudioUtils.getDefaultSampleRateHz();
}
String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz()
: Integer.parseInt(sampleRateString);

@@ -312,9 +309,6 @@ public class WebRtcAudioManager {
// Returns the native output buffer size for low-latency output streams.
private int getLowLatencyOutputFramesPerBuffer() {
assertTrue(isLowLatencyOutputSupported());
if (Build.VERSION.SDK_INT < 17) {
return DEFAULT_FRAME_PER_BUFFER;
}
String framesPerBuffer =
audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
@@ -10,7 +10,6 @@

package org.webrtc.voiceengine;

import android.annotation.TargetApi;
import android.content.Context;
import android.media.AudioAttributes;
import android.media.AudioFormat;

@@ -46,7 +45,7 @@ public class WebRtcAudioTrack {

// By default, WebRTC creates audio tracks with a usage attribute
// corresponding to voice communications, such as telephony or VoIP.
private static final int DEFAULT_USAGE = getDefaultUsageAttribute();
private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION;
private static int usageAttribute = DEFAULT_USAGE;

// This method overrides the default usage attribute and allows the user

@@ -60,15 +59,6 @@ public class WebRtcAudioTrack {
usageAttribute = usage;
}

private static int getDefaultUsageAttribute() {
if (Build.VERSION.SDK_INT >= 21) {
return AudioAttributes.USAGE_VOICE_COMMUNICATION;
} else {
// Not used on SDKs lower than 21.
return 0;
}
}

private final long nativeAudioTrack;
private final AudioManager audioManager;
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();

@@ -154,7 +144,7 @@ public class WebRtcAudioTrack {
byteBuffer.put(emptyBytes);
byteBuffer.position(0);
}
int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
if (bytesWritten != sizeInBytes) {
Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
// If a write() returns a negative value, an error has occurred.

@@ -188,14 +178,6 @@ public class WebRtcAudioTrack {
}
}

private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
if (Build.VERSION.SDK_INT >= 21) {
return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
} else {
return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
}
}

// Stops the inner thread loop which results in calling AudioTrack.stop().
// Does not block the calling thread.
public void stopThread() {
@@ -257,19 +239,12 @@ public class WebRtcAudioTrack {
// Create an AudioTrack object and initialize its associated audio buffer.
// The size of this buffer determines how long an AudioTrack can play
// before running out of data.
if (Build.VERSION.SDK_INT >= 21) {
// If we are on API level 21 or higher, it is possible to use a special AudioTrack
// constructor that uses AudioAttributes and AudioFormat as input. It allows us to
// supersede the notion of stream types for defining the behavior of audio playback,
// and to allow certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
audioTrack = createAudioTrackOnLollipopOrHigher(
sampleRate, channelConfig, minBufferSizeInBytes);
} else {
// Use default constructor for API levels below 21.
audioTrack =
createAudioTrackOnLowerThanLollipop(sampleRate, channelConfig, minBufferSizeInBytes);
}
// As we are on API level 21 or higher, it is possible to use a special AudioTrack
// constructor that uses AudioAttributes and AudioFormat as input. It allows us to
// supersede the notion of stream types for defining the behavior of audio playback,
// and to allow certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
audioTrack = createAudioTrack(sampleRate, channelConfig, minBufferSizeInBytes);
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
releaseAudioResources();

@@ -353,7 +328,7 @@ public class WebRtcAudioTrack {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "setStreamVolume(" + volume + ")");
assertTrue(audioManager != null);
if (isVolumeFixed()) {
if (audioManager.isVolumeFixed()) {
Logging.e(TAG, "The device implements a fixed volume policy.");
return false;
}

@@ -361,12 +336,6 @@ public class WebRtcAudioTrack {
return true;
}

private boolean isVolumeFixed() {
if (Build.VERSION.SDK_INT < 21)
return false;
return audioManager.isVolumeFixed();
}

/** Get current volume level for a phone call audio stream. */
private int getStreamVolume() {
threadChecker.checkIsOnValidThread();

@@ -387,10 +356,9 @@ public class WebRtcAudioTrack {
// Creates and AudioTrack instance using AudioAttributes and AudioFormat as input.
// It allows certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
@TargetApi(21)
private static AudioTrack createAudioTrackOnLollipopOrHigher(
private static AudioTrack createAudioTrack(
int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
Logging.d(TAG, "createAudioTrack");
// TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
// performance when Android O is supported. Add some logging in the mean time.
final int nativeOutputSampleRate =

@@ -418,13 +386,6 @@ public class WebRtcAudioTrack {
AudioManager.AUDIO_SESSION_ID_GENERATE);
}

@SuppressWarnings("deprecation") // Deprecated in API level 25.
private static AudioTrack createAudioTrackOnLowerThanLollipop(
int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
return new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRateInHz, channelConfig,
AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
}

private void logBufferSizeInFrames() {
if (Build.VERSION.SDK_INT >= 23) {
Logging.d(TAG, "AudioTrack: "
@@ -214,13 +214,6 @@ public final class WebRtcAudioUtils {
+ "BT SCO: " + audioManager.isBluetoothScoOn());
}

private static boolean isVolumeFixed(AudioManager audioManager) {
if (Build.VERSION.SDK_INT < 21) {
return false;
}
return audioManager.isVolumeFixed();
}

// Adds volume information for all possible stream types.
private static void logAudioStateVolume(String tag, AudioManager audioManager) {
final int[] streams = {

@@ -233,7 +226,7 @@ public final class WebRtcAudioUtils {
};
Logging.d(tag, "Audio State: ");
// Some devices may not have volume controls and might use a fixed volume.
boolean fixedVolume = isVolumeFixed(audioManager);
boolean fixedVolume = audioManager.isVolumeFixed();
Logging.d(tag, " fixed volume=" + fixedVolume);
if (!fixedVolume) {
for (int stream : streams) {
@@ -10,12 +10,10 @@

package org.webrtc;

import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import androidx.annotation.Nullable;

@TargetApi(21)
public class Camera2Capturer extends CameraCapturer {
private final Context context;
@Nullable private final CameraManager cameraManager;
@@ -10,7 +10,6 @@

package org.webrtc;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;

@@ -30,7 +29,6 @@ import java.util.List;
import java.util.Map;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

@TargetApi(21)
public class Camera2Enumerator implements CameraEnumerator {
private final static String TAG = "Camera2Enumerator";
private final static double NANO_SECONDS_PER_SECOND = 1.0e9;

@@ -107,10 +105,6 @@ public class Camera2Enumerator implements CameraEnumerator {
* Checks if API is supported and all cameras have better than legacy support.
*/
public static boolean isSupported(Context context) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
return false;
}

CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
String[] cameraIds = cameraManager.getCameraIdList();
@@ -147,13 +147,11 @@ public interface EglBase {

/**
* Create a new context with the specified config attributes, sharing data with `sharedContext`.
* If `sharedContext` is null, a root context is created. This function will try to create an EGL
* 1.4 context if possible, and an EGL 1.0 context otherwise.
* If `sharedContext` is null, a root EGL 1.4 context is created.
*/
public static EglBase create(@Nullable Context sharedContext, int[] configAttributes) {
if (sharedContext == null) {
return EglBase14Impl.isEGL14Supported() ? createEgl14(configAttributes)
: createEgl10(configAttributes);
return createEgl14(configAttributes);
} else if (sharedContext instanceof EglBase14.Context) {
return createEgl14((EglBase14.Context) sharedContext, configAttributes);
} else if (sharedContext instanceof EglBase10.Context) {
@@ -94,11 +94,6 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
@Nullable
@Override
public VideoEncoder createEncoder(VideoCodecInfo input) {
// HW encoding is not supported below Android Kitkat.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null;
}

VideoCodecMimeType type = VideoCodecMimeType.valueOf(input.getName());
MediaCodecInfo info = findCodecForType(type);

@@ -135,11 +130,6 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {

@Override
public VideoCodecInfo[] getSupportedCodecs() {
// HW encoding is not supported below Android Kitkat.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return new VideoCodecInfo[0];
}

List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
// Generate a list of supported codecs in order of preference:
// VP8, VP9, H264 (high profile), H264 (baseline profile) and AV1.

@@ -219,13 +209,12 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {

private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
String name = info.getName();
// QCOM Vp8 encoder is supported in KITKAT or later.
return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
// QCOM Vp8 encoder is always supported.
return name.startsWith(QCOM_PREFIX)
// Exynos VP8 encoder is supported in M or later.
|| (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
// Intel Vp8 encoder is supported in LOLLIPOP or later, with the intel encoder enabled.
|| (name.startsWith(INTEL_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
&& enableIntelVp8Encoder);
// Intel Vp8 encoder is always supported, with the intel encoder enabled.
|| (name.startsWith(INTEL_PREFIX) && enableIntelVp8Encoder);
}

private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
@@ -241,11 +230,8 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
return false;
}
String name = info.getName();
// QCOM H264 encoder is supported in KITKAT or later.
return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
// Exynos H264 encoder is supported in LOLLIPOP or later.
|| (name.startsWith(EXYNOS_PREFIX)
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP);
// QCOM and Exynos H264 encoders are always supported.
return name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX);
}

private boolean isMediaCodecAllowed(MediaCodecInfo info) {

@@ -257,14 +243,13 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {

private int getForcedKeyFrameIntervalMs(VideoCodecMimeType type, String codecName) {
if (type == VideoCodecMimeType.VP8 && codecName.startsWith(QCOM_PREFIX)) {
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP
|| Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
} else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
} else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
}
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
}
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
}
// Other codecs don't need key frame forcing.
return 0;
@@ -10,7 +10,6 @@

package org.webrtc;

import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;

@@ -31,10 +30,7 @@ import androidx.annotation.Nullable;
* place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
* the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
* frames. At any time, at most one frame is being processed.
*
* @note This class is only supported on Android Lollipop and above.
*/
@TargetApi(21)
public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
private static final int DISPLAY_FLAGS =
DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
@@ -198,7 +198,7 @@ public class SurfaceTextureHelper {

oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
surfaceTexture = new SurfaceTexture(oesTextureId);
setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> {
surfaceTexture.setOnFrameAvailableListener(st -> {
if (hasPendingTexture) {
Logging.d(TAG, "A frame is already pending, dropping frame.");
}

@@ -208,20 +208,6 @@ public class SurfaceTextureHelper {
}, handler);
}

@TargetApi(21)
private static void setOnFrameAvailableListener(SurfaceTexture surfaceTexture,
SurfaceTexture.OnFrameAvailableListener listener, Handler handler) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
surfaceTexture.setOnFrameAvailableListener(listener, handler);
} else {
// The documentation states that the listener will be called on an arbitrary thread, but in
// pratice, it is always the thread on which the SurfaceTexture was constructed. There are
// assertions in place in case this ever changes. For API >= 21, we use the new API to
// explicitly specify the handler.
surfaceTexture.setOnFrameAvailableListener(listener);
}
}

/**
* Start to stream textures to the given `listener`. If you need to change listener, you need to
* call stopListening() first.
@@ -12,7 +12,6 @@ package org.webrtc;

import static org.junit.Assert.fail;

import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraDevice;

@@ -31,7 +30,6 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

@TargetApi(21)
@RunWith(BaseJUnit4ClassRunner.class)
public class Camera2CapturerTest {
static final String TAG = "Camera2CapturerTest";
@@ -16,7 +16,6 @@ import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import android.annotation.TargetApi;
import android.graphics.Matrix;
import android.opengl.GLES11Ext;
import android.util.Log;

@@ -38,7 +37,6 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

@TargetApi(16)
@RunWith(ParameterizedRunner.class)
@UseRunnerDelegate(BaseJUnit4RunnerDelegate.class)
public class HardwareVideoEncoderTest {
@@ -296,14 +296,12 @@ public class NetworkMonitorTest {
ConnectivityManagerDelegate delegate = new ConnectivityManagerDelegate(
InstrumentationRegistry.getTargetContext(), new HashSet<>());
delegate.getNetworkState();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
Network[] networks = delegate.getAllNetworks();
if (networks.length >= 1) {
delegate.getNetworkState(networks[0]);
delegate.hasInternetCapability(networks[0]);
}
delegate.getDefaultNetId();
Network[] networks = delegate.getAllNetworks();
if (networks.length >= 1) {
delegate.getNetworkState(networks[0]);
delegate.hasInternetCapability(networks[0]);
}
delegate.getDefaultNetId();
}

/** Tests that ConnectivityManagerDelegate preferentially reads from the cache */
@@ -10,7 +10,6 @@

package org.webrtc;

import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;

@@ -29,7 +28,6 @@ import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

@TargetApi(21)
class Camera2Session implements CameraSession {
private static final String TAG = "Camera2Session";
@@ -10,7 +10,6 @@

package org.webrtc;

import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;

@@ -29,25 +28,13 @@ import org.webrtc.EglBase;
* and an EGLSurface.
*/
@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
@TargetApi(18)
class EglBase14Impl implements EglBase14 {
private static final String TAG = "EglBase14Impl";
private static final int EGLExt_SDK_VERSION = Build.VERSION_CODES.JELLY_BEAN_MR2;
private static final int CURRENT_SDK_VERSION = Build.VERSION.SDK_INT;
private EGLContext eglContext;
@Nullable private EGLConfig eglConfig;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;

// EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation
// time stamp on a surface is supported from 18 so we require 18.
public static boolean isEGL14Supported() {
Logging.d(TAG,
"SDK version: " + CURRENT_SDK_VERSION
+ ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
}

public static class Context implements EglBase14.Context {
private final EGLContext egl14Context;

@@ -57,11 +44,8 @@ class EglBase14Impl implements EglBase14 {
}

@Override
@SuppressWarnings("deprecation")
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public long getNativeEglContext() {
return CURRENT_SDK_VERSION >= Build.VERSION_CODES.LOLLIPOP ? egl14Context.getNativeHandle()
: egl14Context.getHandle();
return egl14Context.getNativeHandle();
}

public Context(android.opengl.EGLContext eglContext) {
@@ -10,7 +10,6 @@

package org.webrtc;

import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

@@ -29,11 +28,7 @@ import org.webrtc.ThreadUtils.ThreadChecker;

/**
* Android hardware video encoder.
*
* @note This class is only supported on Android Kitkat and above.
*/
@TargetApi(19)
@SuppressWarnings("deprecation") // Cannot support API level 19 without using deprecated methods.
class HardwareVideoEncoder implements VideoEncoder {
private static final String TAG = "HardwareVideoEncoder";
@@ -56,15 +56,8 @@ class MediaCodecUtils {
MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};

// Color formats supported by texture mode encoding - in order of preference.
static final int[] TEXTURE_COLOR_FORMATS = getTextureColorFormats();

private static int[] getTextureColorFormats() {
if (Build.VERSION.SDK_INT >= 18) {
return new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
} else {
return new int[] {};
}
}
static final int[] TEXTURE_COLOR_FORMATS =
new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};

static @Nullable Integer selectColorFormat(
int[] supportedColorFormats, CodecCapabilities capabilities) {
@@ -83,11 +83,6 @@ class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {
}

private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) {
// HW decoding is not supported on builds before KITKAT.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null;
}

for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = null;
try {

@@ -131,8 +126,8 @@ class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {

private boolean isH264HighProfileSupported(MediaCodecInfo info) {
String name = info.getName();
// Support H.264 HP decoding on QCOM chips for Android L and above.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && name.startsWith(QCOM_PREFIX)) {
// Support H.264 HP decoding on QCOM chips.
if (name.startsWith(QCOM_PREFIX)) {
return true;
}
// Support H.264 HP decoding on Exynos chips for Android M and above.
@@ -10,7 +10,6 @@

package org.webrtc;

import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCrypto;

@@ -99,13 +98,11 @@ class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory {
}

@Override
@TargetApi(18)
public Surface createInputSurface() {
return mediaCodec.createInputSurface();
}

@Override
@TargetApi(19)
public void setParameters(Bundle params) {
mediaCodec.setParameters(params);
}
@@ -54,15 +54,11 @@ class WebRtcAudioEffects {
// Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
// fulfilled.
public static boolean isAcousticEchoCancelerSupported() {
if (Build.VERSION.SDK_INT < 18)
return false;
return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER);
}

// Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled.
public static boolean isNoiseSuppressorSupported() {
if (Build.VERSION.SDK_INT < 18)
return false;
return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR);
}

@@ -188,9 +184,6 @@ class WebRtcAudioEffects {
// As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
// AutomaticGainControl.isAvailable() returns false.
private boolean effectTypeIsVoIP(UUID type) {
if (Build.VERSION.SDK_INT < 18)
return false;

return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
|| (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
}
@@ -64,7 +64,7 @@ class WebRtcAudioManager {
// as well. The NDK doc states that: "As of API level 21, lower latency
// audio input is supported on select devices. To take advantage of this
// feature, first confirm that lower latency output is available".
return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported(context);
return isLowLatencyOutputSupported(context);
}

/**

@@ -85,18 +85,12 @@ class WebRtcAudioManager {
}

private static int getSampleRateForApiLevel(AudioManager audioManager) {
if (Build.VERSION.SDK_INT < 17) {
return DEFAULT_SAMPLE_RATE_HZ;
}
String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
}

// Returns the native output buffer size for low-latency output streams.
private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
if (Build.VERSION.SDK_INT < 17) {
return DEFAULT_FRAME_PER_BUFFER;
}
String framesPerBuffer =
audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
@@ -47,16 +47,7 @@ class WebRtcAudioTrack {

// By default, WebRTC creates audio tracks with a usage attribute
// corresponding to voice communications, such as telephony or VoIP.
private static final int DEFAULT_USAGE = getDefaultUsageAttribute();

private static int getDefaultUsageAttribute() {
if (Build.VERSION.SDK_INT >= 21) {
return AudioAttributes.USAGE_VOICE_COMMUNICATION;
} else {
// Not used on SDKs lower than L.
return 0;
}
}
private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION;

// Indicates the AudioTrack has started playing audio.
private static final int AUDIO_TRACK_START = 0;

@@ -128,7 +119,7 @@ class WebRtcAudioTrack {
byteBuffer.put(emptyBytes);
byteBuffer.position(0);
}
int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
if (bytesWritten != sizeInBytes) {
Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
// If a write() returns a negative value, an error has occurred.

@@ -152,14 +143,6 @@ class WebRtcAudioTrack {
}
}

private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
if (Build.VERSION.SDK_INT >= 21) {
return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
} else {
return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
}
}

// Stops the inner thread loop which results in calling AudioTrack.stop().
// Does not block the calling thread.
public void stopThread() {
@@ -247,18 +230,14 @@ class WebRtcAudioTrack {
// On API level 26 or higher, we can use a low latency mode.
audioTrack = createAudioTrackOnOreoOrHigher(
sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// If we are on API level 21 or higher, it is possible to use a special AudioTrack
} else {
// As we are on API level 21 or higher, it is possible to use a special AudioTrack
// constructor that uses AudioAttributes and AudioFormat as input. It allows us to
// supersede the notion of stream types for defining the behavior of audio playback,
// and to allow certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
audioTrack = createAudioTrackOnLollipopOrHigher(
audioTrack = createAudioTrackBeforeOreo(
sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
} else {
// Use default constructor for API levels below 21.
audioTrack =
createAudioTrackOnLowerThanLollipop(sampleRate, channelConfig, minBufferSizeInBytes);
}
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());

@@ -360,7 +339,7 @@ class WebRtcAudioTrack {
private boolean setStreamVolume(int volume) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "setStreamVolume(" + volume + ")");
if (isVolumeFixed()) {
if (audioManager.isVolumeFixed()) {
Logging.e(TAG, "The device implements a fixed volume policy.");
return false;
}

@@ -368,12 +347,6 @@ class WebRtcAudioTrack {
return true;
}

private boolean isVolumeFixed() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
return false;
return audioManager.isVolumeFixed();
}

/** Get current volume level for a phone call audio stream. */
@CalledByNative
private int getStreamVolume() {

@@ -441,10 +414,9 @@ class WebRtcAudioTrack {
// Creates and AudioTrack instance using AudioAttributes and AudioFormat as input.
// It allows certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz,
int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
private static AudioTrack createAudioTrackBeforeOreo(int sampleRateInHz, int channelConfig,
int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
Logging.d(TAG, "createAudioTrackBeforeOreo");
logNativeOutputSampleRate(sampleRateInHz);

// Create an audio track where the audio usage is for VoIP and the content type is speech.

@@ -489,13 +461,6 @@ class WebRtcAudioTrack {
return builder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy());
}

@SuppressWarnings("deprecation") // Deprecated in API level 25.
private static AudioTrack createAudioTrackOnLowerThanLollipop(
int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
return new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRateInHz, channelConfig,
AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
}

private void logBufferSizeInFrames() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
Logging.d(TAG,

@@ -201,13 +201,6 @@ final class WebRtcAudioUtils {
+ "BT SCO: " + audioManager.isBluetoothScoOn());
}

private static boolean isVolumeFixed(AudioManager audioManager) {
if (Build.VERSION.SDK_INT < 21) {
return false;
}
return audioManager.isVolumeFixed();
}

// Adds volume information for all possible stream types.
private static void logAudioStateVolume(String tag, AudioManager audioManager) {
final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,

@@ -215,7 +208,7 @@ final class WebRtcAudioUtils {
AudioManager.STREAM_SYSTEM};
Logging.d(tag, "Audio State: ");
// Some devices may not have volume controls and might use a fixed volume.
boolean fixedVolume = isVolumeFixed(audioManager);
boolean fixedVolume = audioManager.isVolumeFixed();
Logging.d(tag, " fixed volume=" + fixedVolume);
if (!fixedVolume) {
for (int stream : streams) {