Remove NewApi lint suppression.
BUG=webrtc:6597
Review-Url: https://codereview.webrtc.org/2662273004
Cr-Commit-Position: refs/heads/master@{#16448}
@@ -5,7 +5,6 @@
      should lint cleanly for the issues below. -->
 <!-- TODO(phoglund): make work with suppress.py or remove printout referring
      to suppress.py. -->
-<issue id="NewApi"></issue>

 <issue id="Assert" severity="ignore"/>
 <issue id="UseSparseArrays" severity="ignore"/>
@@ -21,7 +20,6 @@
 <issue id="RelativeOverlap" severity="ignore"/>
 <issue id="RtlCompat" severity="ignore"/>
 <issue id="IconMissingDensityFolder" severity="ignore"/>
-<issue id="NewApi" severity="ignore"/>
 <issue id="OldTargetApi" severity="ignore"/>

 <issue id="GoogleAppIndexingWarning" severity="ignore"/>
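With the blanket NewApi suppression removed, lint now flags any call to an API newer than minSdkVersion. The hunks below therefore annotate each such call site with @TargetApi and pair it with a runtime version check. A minimal sketch of that pattern, with an illustrative class name, method name and API level (not taken from this CL):

import android.annotation.TargetApi;
import android.os.Build;

class NewApiGuardSketch {
  void maybeLogExtendedParameters() {
    // Only take the API 23+ path on devices that actually have it.
    if (Build.VERSION.SDK_INT >= 23) {
      logExtendedParameters();
    }
  }

  @TargetApi(23)
  private void logExtendedParameters() {
    // Calls to API 23+ methods go here; @TargetApi silences the NewApi
    // lint check for this method only, instead of for the whole project.
  }
}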
@@ -286,10 +286,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
     screencaptureEnabled = intent.getBooleanExtra(EXTRA_SCREENCAPTURE, false);
     // If capturing format is not specified for screencapture, use screen resolution.
     if (screencaptureEnabled && videoWidth == 0 && videoHeight == 0) {
-      DisplayMetrics displayMetrics = new DisplayMetrics();
-      WindowManager windowManager =
-          (WindowManager) getApplication().getSystemService(Context.WINDOW_SERVICE);
-      windowManager.getDefaultDisplay().getRealMetrics(displayMetrics);
+      DisplayMetrics displayMetrics = getDisplayMetrics();
       videoWidth = displayMetrics.widthPixels;
       videoHeight = displayMetrics.heightPixels;
     }
@@ -369,6 +366,15 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
     }
   }

+  @TargetApi(17)
+  private DisplayMetrics getDisplayMetrics() {
+    DisplayMetrics displayMetrics = new DisplayMetrics();
+    WindowManager windowManager =
+        (WindowManager) getApplication().getSystemService(Context.WINDOW_SERVICE);
+    windowManager.getDefaultDisplay().getRealMetrics(displayMetrics);
+    return displayMetrics;
+  }
+
   @TargetApi(19)
   private static int getSystemUiVisibility() {
     int flags = View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN;
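The new getDisplayMetrics() helper relies on Display.getRealMetrics(), which exists only from API 17; in this CL it is reached only on the screen-capture path (API 21+), which is presumably why the @TargetApi(17) annotation without a runtime check is enough. A version-tolerant variant would add a fallback; a sketch of that idea, with a hypothetical helper class (not part of this CL):

import android.annotation.TargetApi;
import android.os.Build;
import android.util.DisplayMetrics;
import android.view.WindowManager;

class DisplayMetricsSketch {
  @TargetApi(17)
  static DisplayMetrics getDisplayMetrics(WindowManager windowManager) {
    DisplayMetrics displayMetrics = new DisplayMetrics();
    if (Build.VERSION.SDK_INT >= 17) {
      // getRealMetrics() (API 17+) includes system decorations such as the navigation bar.
      windowManager.getDefaultDisplay().getRealMetrics(displayMetrics);
    } else {
      // Older devices: fall back to the application display area.
      windowManager.getDefaultDisplay().getMetrics(displayMetrics);
    }
    return displayMetrics;
  }
}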
@@ -436,6 +442,21 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
     return null;
   }

+  @TargetApi(21)
+  private VideoCapturer createScreenCapturer() {
+    if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
+      reportError("User didn't give permission to capture the screen.");
+      return null;
+    }
+    return new ScreenCapturerAndroid(
+        mediaProjectionPermissionResultData, new MediaProjection.Callback() {
+          @Override
+          public void onStop() {
+            reportError("User revoked permission to capture the screen.");
+          }
+        });
+  }
+
   // Activity interfaces
   @Override
   public void onStop() {
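createScreenCapturer() consumes mediaProjectionPermissionResultCode and mediaProjectionPermissionResultData, which are set elsewhere in CallActivity. For context, a sketch of the usual way such a MediaProjection permission result is obtained on API 21+; the class name and request code are illustrative, not from this CL:

import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.media.projection.MediaProjectionManager;

public class ScreenCapturePermissionSketch extends Activity {
  private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1;
  private int mediaProjectionPermissionResultCode;
  private Intent mediaProjectionPermissionResultData;

  @TargetApi(21)
  private void requestScreenCapturePermission() {
    MediaProjectionManager manager =
        (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    // Shows the system dialog asking the user to allow screen capture.
    startActivityForResult(manager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
  }

  @Override
  protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == CAPTURE_PERMISSION_REQUEST_CODE) {
      // These two values feed the ScreenCapturerAndroid constructor shown above.
      mediaProjectionPermissionResultCode = resultCode;
      mediaProjectionPermissionResultData = data;
    }
  }
}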
@@ -687,17 +708,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
         return null;
       }
     } else if (screencaptureEnabled) {
-      if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
-        reportError("User didn't give permission to capture the screen.");
-        return null;
-      }
-      return new ScreenCapturerAndroid(
-          mediaProjectionPermissionResultData, new MediaProjection.Callback() {
-            @Override
-            public void onStop() {
-              reportError("User revoked permission to capture the screen.");
-            }
-          });
+      return createScreenCapturer();
     } else if (useCamera2()) {
       if (!captureToTexture()) {
         reportError(getString(R.string.camera2_texture_only_error));
@@ -10,18 +10,17 @@

 package org.webrtc.voiceengine;

-import org.webrtc.Logging;
-import org.webrtc.ThreadUtils;
-
+import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioFormat;
 import android.media.AudioRecord;
 import android.media.MediaRecorder.AudioSource;
 import android.os.Process;
-
 import java.lang.System;
 import java.nio.ByteBuffer;
 import java.util.concurrent.TimeUnit;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;

 public class WebRtcAudioRecord {
   private static final boolean DEBUG = false;
@@ -263,6 +262,7 @@ public class WebRtcAudioRecord {
         + "sample rate: " + audioRecord.getSampleRate());
   }

+  @TargetApi(23)
   private void logMainParametersExtended() {
     if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
       Logging.d(TAG, "AudioRecord: "
@@ -284,6 +284,7 @@ public class WebRtcAudioTrack {
         + "max gain: " + audioTrack.getMaxVolume());
   }

+  @TargetApi(24)
   private void logMainParametersExtended() {
     if (WebRtcAudioUtils.runningOnMarshmallowOrHigher()) {
       Logging.d(TAG, "AudioTrack: "
@@ -303,6 +304,7 @@ public class WebRtcAudioTrack {
   // potential audio glitch.
   // TODO(henrika): keep track of this value in the field and possibly add new
   // UMA stat if needed.
+  @TargetApi(24)
   private void logUnderrunCount() {
     if (WebRtcAudioUtils.runningOnNougatOrHigher()) {
       Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
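The runningOnMarshmallowOrHigher() and runningOnNougatOrHigher() guards used above are presumably thin wrappers around Build.VERSION.SDK_INT; paired with @TargetApi, they let the newer AudioRecord/AudioTrack calls pass the NewApi check while being skipped on older devices. A sketch of that pairing (illustrative class, not the actual WebRtcAudioUtils source):

import android.annotation.TargetApi;
import android.media.AudioTrack;
import android.os.Build;
import android.util.Log;

class UnderrunLogSketch {
  private static final String TAG = "UnderrunLogSketch";

  static boolean runningOnNougatOrHigher() {
    // Nougat is API level 24.
    return Build.VERSION.SDK_INT >= 24;
  }

  @TargetApi(24)
  static void logUnderrunCount(AudioTrack audioTrack) {
    if (runningOnNougatOrHigher()) {
      // AudioTrack.getUnderrunCount() was added in API 24.
      Log.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
    }
  }
}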
@@ -19,7 +19,7 @@ import android.support.test.filters.MediumTest;
 import android.support.test.filters.SmallTest;
 import java.io.IOException;
 import java.lang.Thread;
-import java.nio.charset.StandardCharsets;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
 import org.chromium.base.test.BaseJUnit4ClassRunner;
@@ -106,7 +106,7 @@ public class FileVideoCapturerTest {
       assertEquals(FRAME_WIDTH * FRAME_HEIGHT * 3 / 2, frame.data.length);

       byte[] expectedNV12Bytes = new byte[frame.data.length];
-      FileVideoCapturer.nativeI420ToNV21(expectedFrames[i].getBytes(StandardCharsets.US_ASCII),
+      FileVideoCapturer.nativeI420ToNV21(expectedFrames[i].getBytes(Charset.forName("US-ASCII")),
           FRAME_WIDTH, FRAME_HEIGHT, expectedNV12Bytes);

       assertTrue(Arrays.equals(expectedNV12Bytes, frame.data));
@@ -19,7 +19,7 @@ import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.lang.Thread;
 import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Random;
 import org.chromium.base.test.BaseJUnit4ClassRunner;
@@ -46,7 +46,7 @@ public class VideoFileRendererTest {
     int[] planeSizes = {
         frameWidth * frameWidth, frameWidth * frameHeight / 4, frameWidth * frameHeight / 4};

-    byte[] frameBytes = frameStr.getBytes(StandardCharsets.US_ASCII);
+    byte[] frameBytes = frameStr.getBytes(Charset.forName("US-ASCII"));
     ByteBuffer[] yuvPlanes = new ByteBuffer[3];
     int pos = 0;
     for (int i = 0; i < 3; i++) {
@@ -69,7 +69,7 @@ public class VideoFileRendererTest {
     int length = (int) writtenFile.length();
     byte[] data = new byte[length];
     writtenFile.readFully(data);
-    String fileContent = new String(data, StandardCharsets.US_ASCII);
+    String fileContent = new String(data, Charset.forName("US-ASCII"));
     String expected = "YUV4MPEG2 C420 W4 H4 Ip F30:1 A1:1\n"
         + "FRAME\n"
         + "THIS IS JUST SOME TEXT xFRAME\n"
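These test changes replace java.nio.charset.StandardCharsets, which Android only provides from API 19, with Charset.forName("US-ASCII"), which is available on every supported API level and yields the same encoding. A small stand-alone illustration (plain Java, not from the CL):

import java.nio.charset.Charset;
import java.util.Arrays;

class CharsetSketch {
  public static void main(String[] args) {
    // Same bytes as "FRAME".getBytes(StandardCharsets.US_ASCII) on API 19+.
    byte[] asciiBytes = "FRAME".getBytes(Charset.forName("US-ASCII"));
    System.out.println(Arrays.toString(asciiBytes));
  }
}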