Remove checks for SDK <= 21

WebRTC’s minSdk is 21, so all those checks are dead code.

Change-Id: I26497fd92259b66d9e5ac6afbb393adf4d904c77
Bug: webrtc:13780
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/253124
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Reviewed-by: Linus Nilsson <lnilsson@webrtc.org>
Commit-Queue: Xavier Lepaul <xalep@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#36140}
This commit is contained in:
Xavier Lepaul
2022-03-07 10:23:23 +01:00
committed by WebRTC LUCI CQ
parent 7befe8e5e4
commit 0f50cc2849
27 changed files with 55 additions and 295 deletions

View File

@ -10,7 +10,6 @@
package org.webrtc;
import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
@ -29,7 +28,6 @@ import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@TargetApi(21)
class Camera2Session implements CameraSession {
private static final String TAG = "Camera2Session";

View File

@ -10,7 +10,6 @@
package org.webrtc;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
@ -29,25 +28,13 @@ import org.webrtc.EglBase;
* and an EGLSurface.
*/
@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
@TargetApi(18)
class EglBase14Impl implements EglBase14 {
private static final String TAG = "EglBase14Impl";
private static final int EGLExt_SDK_VERSION = Build.VERSION_CODES.JELLY_BEAN_MR2;
private static final int CURRENT_SDK_VERSION = Build.VERSION.SDK_INT;
private EGLContext eglContext;
@Nullable private EGLConfig eglConfig;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
// EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation
// time stamp on a surface is supported from 18 so we require 18.
public static boolean isEGL14Supported() {
Logging.d(TAG,
"SDK version: " + CURRENT_SDK_VERSION
+ ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
}
public static class Context implements EglBase14.Context {
private final EGLContext egl14Context;
@ -57,11 +44,8 @@ class EglBase14Impl implements EglBase14 {
}
@Override
@SuppressWarnings("deprecation")
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public long getNativeEglContext() {
return CURRENT_SDK_VERSION >= Build.VERSION_CODES.LOLLIPOP ? egl14Context.getNativeHandle()
: egl14Context.getHandle();
return egl14Context.getNativeHandle();
}
public Context(android.opengl.EGLContext eglContext) {

View File

@ -10,7 +10,6 @@
package org.webrtc;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
@ -29,11 +28,7 @@ import org.webrtc.ThreadUtils.ThreadChecker;
/**
* Android hardware video encoder.
*
* @note This class is only supported on Android Kitkat and above.
*/
@TargetApi(19)
@SuppressWarnings("deprecation") // Cannot support API level 19 without using deprecated methods.
class HardwareVideoEncoder implements VideoEncoder {
private static final String TAG = "HardwareVideoEncoder";

View File

@ -56,15 +56,8 @@ class MediaCodecUtils {
MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
// Color formats supported by texture mode encoding - in order of preference.
static final int[] TEXTURE_COLOR_FORMATS = getTextureColorFormats();
private static int[] getTextureColorFormats() {
if (Build.VERSION.SDK_INT >= 18) {
return new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
} else {
return new int[] {};
}
}
static final int[] TEXTURE_COLOR_FORMATS =
new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
static @Nullable Integer selectColorFormat(
int[] supportedColorFormats, CodecCapabilities capabilities) {

View File

@ -83,11 +83,6 @@ class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {
}
private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) {
// HW decoding is not supported on builds before KITKAT.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null;
}
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = null;
try {
@ -131,8 +126,8 @@ class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {
private boolean isH264HighProfileSupported(MediaCodecInfo info) {
String name = info.getName();
// Support H.264 HP decoding on QCOM chips for Android L and above.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && name.startsWith(QCOM_PREFIX)) {
// Support H.264 HP decoding on QCOM chips.
if (name.startsWith(QCOM_PREFIX)) {
return true;
}
// Support H.264 HP decoding on Exynos chips for Android M and above.

View File

@ -10,7 +10,6 @@
package org.webrtc;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCrypto;
@ -99,13 +98,11 @@ class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory {
}
@Override
@TargetApi(18)
public Surface createInputSurface() {
return mediaCodec.createInputSurface();
}
@Override
@TargetApi(19)
public void setParameters(Bundle params) {
mediaCodec.setParameters(params);
}

View File

@ -54,15 +54,11 @@ class WebRtcAudioEffects {
// Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
// fulfilled.
public static boolean isAcousticEchoCancelerSupported() {
if (Build.VERSION.SDK_INT < 18)
return false;
return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER);
}
// Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled.
public static boolean isNoiseSuppressorSupported() {
if (Build.VERSION.SDK_INT < 18)
return false;
return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR);
}
@ -188,9 +184,6 @@ class WebRtcAudioEffects {
// As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
// AutomaticGainControl.isAvailable() returns false.
private boolean effectTypeIsVoIP(UUID type) {
if (Build.VERSION.SDK_INT < 18)
return false;
return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
|| (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
}

View File

@ -64,7 +64,7 @@ class WebRtcAudioManager {
// as well. The NDK doc states that: "As of API level 21, lower latency
// audio input is supported on select devices. To take advantage of this
// feature, first confirm that lower latency output is available".
return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported(context);
return isLowLatencyOutputSupported(context);
}
/**
@ -85,18 +85,12 @@ class WebRtcAudioManager {
}
private static int getSampleRateForApiLevel(AudioManager audioManager) {
if (Build.VERSION.SDK_INT < 17) {
return DEFAULT_SAMPLE_RATE_HZ;
}
String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
}
// Returns the native output buffer size for low-latency output streams.
private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
if (Build.VERSION.SDK_INT < 17) {
return DEFAULT_FRAME_PER_BUFFER;
}
String framesPerBuffer =
audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);

View File

@ -47,16 +47,7 @@ class WebRtcAudioTrack {
// By default, WebRTC creates audio tracks with a usage attribute
// corresponding to voice communications, such as telephony or VoIP.
private static final int DEFAULT_USAGE = getDefaultUsageAttribute();
private static int getDefaultUsageAttribute() {
if (Build.VERSION.SDK_INT >= 21) {
return AudioAttributes.USAGE_VOICE_COMMUNICATION;
} else {
// Not used on SDKs lower than L.
return 0;
}
}
private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION;
// Indicates the AudioTrack has started playing audio.
private static final int AUDIO_TRACK_START = 0;
@ -128,7 +119,7 @@ class WebRtcAudioTrack {
byteBuffer.put(emptyBytes);
byteBuffer.position(0);
}
int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
if (bytesWritten != sizeInBytes) {
Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
// If a write() returns a negative value, an error has occurred.
@ -152,14 +143,6 @@ class WebRtcAudioTrack {
}
}
private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
if (Build.VERSION.SDK_INT >= 21) {
return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
} else {
return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
}
}
// Stops the inner thread loop which results in calling AudioTrack.stop().
// Does not block the calling thread.
public void stopThread() {
@ -247,18 +230,14 @@ class WebRtcAudioTrack {
// On API level 26 or higher, we can use a low latency mode.
audioTrack = createAudioTrackOnOreoOrHigher(
sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// If we are on API level 21 or higher, it is possible to use a special AudioTrack
} else {
// As we are on API level 21 or higher, it is possible to use a special AudioTrack
// constructor that uses AudioAttributes and AudioFormat as input. It allows us to
// supersede the notion of stream types for defining the behavior of audio playback,
// and to allow certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
audioTrack = createAudioTrackOnLollipopOrHigher(
audioTrack = createAudioTrackBeforeOreo(
sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
} else {
// Use default constructor for API levels below 21.
audioTrack =
createAudioTrackOnLowerThanLollipop(sampleRate, channelConfig, minBufferSizeInBytes);
}
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
@ -360,7 +339,7 @@ class WebRtcAudioTrack {
private boolean setStreamVolume(int volume) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "setStreamVolume(" + volume + ")");
if (isVolumeFixed()) {
if (audioManager.isVolumeFixed()) {
Logging.e(TAG, "The device implements a fixed volume policy.");
return false;
}
@ -368,12 +347,6 @@ class WebRtcAudioTrack {
return true;
}
private boolean isVolumeFixed() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
return false;
return audioManager.isVolumeFixed();
}
/** Get current volume level for a phone call audio stream. */
@CalledByNative
private int getStreamVolume() {
@ -441,10 +414,9 @@ class WebRtcAudioTrack {
// Creates and AudioTrack instance using AudioAttributes and AudioFormat as input.
// It allows certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz,
int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
private static AudioTrack createAudioTrackBeforeOreo(int sampleRateInHz, int channelConfig,
int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
Logging.d(TAG, "createAudioTrackBeforeOreo");
logNativeOutputSampleRate(sampleRateInHz);
// Create an audio track where the audio usage is for VoIP and the content type is speech.
@ -489,13 +461,6 @@ class WebRtcAudioTrack {
return builder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy());
}
@SuppressWarnings("deprecation") // Deprecated in API level 25.
private static AudioTrack createAudioTrackOnLowerThanLollipop(
int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
return new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRateInHz, channelConfig,
AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
}
private void logBufferSizeInFrames() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
Logging.d(TAG,

View File

@ -201,13 +201,6 @@ final class WebRtcAudioUtils {
+ "BT SCO: " + audioManager.isBluetoothScoOn());
}
private static boolean isVolumeFixed(AudioManager audioManager) {
if (Build.VERSION.SDK_INT < 21) {
return false;
}
return audioManager.isVolumeFixed();
}
// Adds volume information for all possible stream types.
private static void logAudioStateVolume(String tag, AudioManager audioManager) {
final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
@ -215,7 +208,7 @@ final class WebRtcAudioUtils {
AudioManager.STREAM_SYSTEM};
Logging.d(tag, "Audio State: ");
// Some devices may not have volume controls and might use a fixed volume.
boolean fixedVolume = isVolumeFixed(audioManager);
boolean fixedVolume = audioManager.isVolumeFixed();
Logging.d(tag, " fixed volume=" + fixedVolume);
if (!fixedVolume) {
for (int stream : streams) {