Android: Replace EGL14 with EGL10
The purpose of this change is to support older API levels by replacing EGL14 (API level 17) with EGL10 (API level 1). The main goal is to lower the API level requirement for SurfaceViewRenderer from API level 17 to API level 15. Camera texture capture will also work on API level < 17 (as will texture encode/decode in MediaCodec, but we don't use MediaCodec below API level 18?).

GLSurfaceView/VideoRendererGui already uses EGL10. EGL 1.1 - 1.4 added new functionality, but nothing that affects performance, and we don't need that functionality, so there is no reason not to use EGL 1.0. I have profiled AppRTCDemo with the Qualcomm Trepn Profiler on a Nexus 5 and a Nexus 6 and couldn't see any difference.

Specifically, this CL:
* Updates EglBase to use EGL10 instead of EGL14.
* Updates imports from EGL14 to EGL10 in a number of files (and changes import order in some cases).
* Updates VideoCapturerAndroid to always support texture capture.

Review URL: https://codereview.webrtc.org/1396013004

Cr-Commit-Position: refs/heads/master@{#10378}
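For reference, the core of the CL is the switch in EglBase from the static EGL14 functions (API level 17) to the EGL10 interface obtained through EGLContext.getEGL() (API level 1). The sketch below only illustrates that call mapping, using the same locally declared constants the new EglBase uses; the class and method names in it are made up for the example and are not part of the CL.

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;

// Illustrative helper only: shows how the EGL14 calls map onto EGL10 instance calls.
final class Egl10Sketch {
  // EGL10 does not define these two constants, so they are declared locally --
  // the same trick GLSurfaceView and the new EglBase use.
  private static final int EGL_OPENGL_ES2_BIT = 4;
  private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;

  static EGLSurface createPbuffer(int width, int height) {
    // EGL14.eglGetDisplay(...) becomes an instance call on the EGL10 interface.
    final EGL10 egl = (EGL10) EGLContext.getEGL();
    final EGLDisplay display = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
    // EGL10.eglInitialize takes a single int[2] for the version, with no array offsets.
    egl.eglInitialize(display, new int[2]);

    final int[] configAttributes = {
        EGL10.EGL_RED_SIZE, 8, EGL10.EGL_GREEN_SIZE, 8, EGL10.EGL_BLUE_SIZE, 8,
        EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
        EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
        EGL10.EGL_NONE};
    final EGLConfig[] configs = new EGLConfig[1];
    final int[] numConfigs = new int[1];
    // Unlike EGL14.eglChooseConfig, there are no offset arguments.
    egl.eglChooseConfig(display, configAttributes, configs, configs.length, numConfigs);

    final int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
    final EGLContext context =
        egl.eglCreateContext(display, configs[0], EGL10.EGL_NO_CONTEXT, contextAttributes);

    final int[] surfaceAttributes = {
        EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
    final EGLSurface surface = egl.eglCreatePbufferSurface(display, configs[0], surfaceAttributes);
    egl.eglMakeCurrent(display, surface, surface, context);
    return surface;
  }
}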
@@ -26,18 +26,18 @@
*/
package org.webrtc;

import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.test.ActivityTestCase;
import android.test.suitebuilder.annotation.SmallTest;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;

import java.nio.ByteBuffer;
import java.util.Random;

import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLContext;

public final class GlRectDrawerTest extends ActivityTestCase {
// Resolution of the test image.
@@ -100,7 +100,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
@SmallTest
public void testRgbRendering() {
// Create EGL base with a pixel buffer as display output.
final EglBase eglBase = new EglBase(EGL14.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
eglBase.makeCurrent();

@@ -137,7 +137,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
@SmallTest
public void testYuvRendering() {
// Create EGL base with a pixel buffer as display output.
EglBase eglBase = new EglBase(EGL14.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
eglBase.makeCurrent();

@@ -266,7 +266,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
}

// Create EGL base with a pixel buffer as display output.
final EglBase eglBase = new EglBase(EGL14.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);

// Create resources for generating OES textures.

@@ -26,18 +26,19 @@
*/
package org.webrtc;

import android.test.ActivityTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.test.ActivityTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;

import java.nio.ByteBuffer;

import javax.microedition.khronos.egl.EGL10;

public final class SurfaceTextureHelperTest extends ActivityTestCase {
/**
* Mock texture listener with blocking wait functionality.
@@ -108,7 +109,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
final int width = 16;
final int height = 16;
// Create EGL base with a pixel buffer as display output.
final EglBase eglBase = new EglBase(EGL14.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
eglBase.createPbufferSurface(width, height);
final GlRectDrawer drawer = new GlRectDrawer();

@@ -175,7 +176,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
final int width = 16;
final int height = 16;
// Create EGL base with a pixel buffer as display output.
final EglBase eglBase = new EglBase(EGL14.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
eglBase.createPbufferSurface(width, height);

// Create SurfaceTextureHelper and listener.
@@ -239,11 +240,11 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
public static void testDisconnect() throws InterruptedException {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create(EGL14.EGL_NO_CONTEXT);
SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT);
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.setListener(listener);
// Create EglBase with the SurfaceTexture as target EGLSurface.
final EglBase eglBase = new EglBase(EGL14.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
eglBase.makeCurrent();
// Assert no frame has been received yet.
@@ -275,7 +276,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
@SmallTest
public static void testDisconnectImmediately() {
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create(EGL14.EGL_NO_CONTEXT);
SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT);
surfaceTextureHelper.disconnect();
}

@@ -291,14 +292,14 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {

// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create(EGL14.EGL_NO_CONTEXT, handler);
SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT, handler);
// Create a mock listener and expect frames to be delivered on |thread|.
final MockTextureListener listener = new MockTextureListener(thread);
surfaceTextureHelper.setListener(listener);

// Create resources for stubbing an OES texture producer. |eglOesBase| has the
// SurfaceTexture in |surfaceTextureHelper| as the target EGLSurface.
final EglBase eglOesBase = new EglBase(EGL14.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
final EglBase eglOesBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
eglOesBase.makeCurrent();
// Draw a frame onto the SurfaceTexture.

@@ -28,7 +28,6 @@
package org.webrtc;

import android.graphics.Point;
import android.opengl.EGL14;
import android.test.ActivityTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import android.view.View.MeasureSpec;
@@ -37,6 +36,8 @@ import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;

import javax.microedition.khronos.egl.EGL10;

public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
/**
* List with all possible scaling types.
@@ -110,7 +111,7 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
}

// Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
surfaceViewRenderer.init(EGL14.EGL_NO_CONTEXT, null);
surfaceViewRenderer.init(EGL10.EGL_NO_CONTEXT, null);
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
@@ -133,7 +134,7 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
public void testFrame1280x720() {
final SurfaceViewRenderer surfaceViewRenderer =
new SurfaceViewRenderer(getInstrumentation().getContext());
surfaceViewRenderer.init(EGL14.EGL_NO_CONTEXT, null);
surfaceViewRenderer.init(EGL10.EGL_NO_CONTEXT, null);

// Test different rotation degress, but same rotated size.
for (int rotationDegree : new int[] {0, 90, 180, 270}) {

@@ -26,8 +26,6 @@
*/
package org.webrtc;

import android.opengl.EGL14;
import android.os.Build;
import android.test.ActivityTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;
@@ -39,6 +37,8 @@ import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import java.util.HashSet;
import java.util.Set;

import javax.microedition.khronos.egl.EGL10;

@SuppressWarnings("deprecation")
public class VideoCapturerAndroidTest extends ActivityTestCase {
static final String TAG = "VideoCapturerAndroidTest";
@@ -87,12 +87,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

@SmallTest
public void testCreateAndReleaseUsingTextures() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
Log.i(TAG, "Capturing to textures is not supported, requires EGL14.");
return;
}
VideoCapturerAndroidTestFixtures.release(
VideoCapturerAndroid.create("", null, EGL14.EGL_NO_CONTEXT));
VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT));
}

@SmallTest
@@ -115,12 +111,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
// TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
public void DISABLED_testStartVideoCapturerUsingTextures() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
Log.i(TAG, "Capturing to textures is not supported, requires EGL14.");
return;
}
VideoCapturerAndroid capturer =
VideoCapturerAndroid.create("", null, EGL14.EGL_NO_CONTEXT);
VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
}

@@ -162,11 +154,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
// TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
public void DISABLED_testSwitchVideoCapturerUsingTextures() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
Log.i(TAG, "Capturing to textures is not supported, requires EGL14.");
return;
}
VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL14.EGL_NO_CONTEXT);
VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.switchCamera(capturer);
}

@@ -191,13 +179,9 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

@MediumTest
public void testCameraCallsAfterStopUsingTextures() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
Log.i(TAG, "Capturing to textures is not supported, requires EGL14.");
return;
}
final String deviceName = CameraEnumerationAndroid.getDeviceName(0);
final VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName, null,
EGL14.EGL_NO_CONTEXT);
EGL10.EGL_NO_CONTEXT);

VideoCapturerAndroidTestFixtures.cameraCallsAfterStop(capturer,
getInstrumentation().getContext());
@@ -214,11 +198,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
// TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
public void DISABLED_testStopRestartVideoSourceUsingTextures() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
Log.i(TAG, "Capturing to textures is not supported, requires EGL14.");
return;
}
VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL14.EGL_NO_CONTEXT);
VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
}

@@ -235,13 +215,9 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

@SmallTest
public void testStartStopWithDifferentResolutionsUsingTextures() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
Log.i(TAG, "Capturing to textures is not supported, requires EGL14.");
return;
}
String deviceName = CameraEnumerationAndroid.getDeviceName(0);
VideoCapturerAndroid capturer =
VideoCapturerAndroid.create(deviceName, null, EGL14.EGL_NO_CONTEXT);
VideoCapturerAndroid.create(deviceName, null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.startStopWithDifferentResolutions(capturer,
getInstrumentation().getContext());
}
@@ -259,13 +235,9 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

@SmallTest
public void testReturnBufferLateUsingTextures() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
Log.i(TAG, "Capturing to textures is not supported, requires EGL14.");
return;
}
String deviceName = CameraEnumerationAndroid.getDeviceName(0);
VideoCapturerAndroid capturer =
VideoCapturerAndroid.create(deviceName, null, EGL14.EGL_NO_CONTEXT);
VideoCapturerAndroid.create(deviceName, null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.returnBufferLate(capturer,
getInstrumentation().getContext());
}
@@ -282,12 +254,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
// TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@MediumTest
public void DISABLED_testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
Log.i(TAG, "Capturing to textures is not supported, requires EGL14.");
return;
}
final VideoCapturerAndroid capturer =
VideoCapturerAndroid.create("", null, EGL14.EGL_NO_CONTEXT);
VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
}
}

@@ -28,35 +28,36 @@
package org.webrtc;

import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.view.Surface;

import org.webrtc.Logging;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;

/**
* Holds EGL state and utility methods for handling an EGLContext, an EGLDisplay, and an EGLSurface.
*/
public final class EglBase {
private static final String TAG = "EglBase";
private static final int EGL14_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
// These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
// https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
// This is similar to how GlSurfaceView does:
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
private static final int EGL_OPENGL_ES2_BIT = 4;
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
// Android-specific extension.
private static final int EGL_RECORDABLE_ANDROID = 0x3142;

private final EGL10 egl;
private EGLContext eglContext;
private ConfigType configType;
private EGLConfig eglConfig;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;

public static boolean isEGL14Supported() {
Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION);
return (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION);
}
private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;

// EGLConfig constructor type. Influences eglChooseConfig arguments.
public static enum ConfigType {
@@ -73,11 +74,12 @@ public final class EglBase {
// Create root context without any EGLSurface or parent EGLContext. This can be used for branching
// new contexts that share data.
public EglBase() {
this(EGL14.EGL_NO_CONTEXT, ConfigType.PLAIN);
this(EGL10.EGL_NO_CONTEXT, ConfigType.PLAIN);
}

// Create a new context with the specified config type, sharing data with sharedContext.
public EglBase(EGLContext sharedContext, ConfigType configType) {
this.egl = (EGL10) EGLContext.getEGL();
this.configType = configType;
eglDisplay = getEglDisplay();
eglConfig = getEglConfig(eglDisplay, configType);
@@ -103,12 +105,12 @@ public final class EglBase {
if (configType == ConfigType.PIXEL_BUFFER) {
Logging.w(TAG, "This EGL context is configured for PIXEL_BUFFER, but uses regular Surface");
}
if (eglSurface != EGL14.EGL_NO_SURFACE) {
if (eglSurface != EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL14.EGL_NONE};
eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
int[] surfaceAttribs = {EGL10.EGL_NONE};
eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Failed to create window surface");
}
}
@@ -124,12 +126,12 @@ public final class EglBase {
throw new RuntimeException(
"This EGL context is not configured to use a pixel buffer: " + configType);
}
if (eglSurface != EGL14.EGL_NO_SURFACE) {
if (eglSurface != EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Failed to create pixel buffer surface");
}
}
@@ -139,30 +141,30 @@ public final class EglBase {
}

public boolean hasSurface() {
return eglSurface != EGL14.EGL_NO_SURFACE;
return eglSurface != EGL10.EGL_NO_SURFACE;
}

public int surfaceWidth() {
final int widthArray[] = new int[1];
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
return widthArray[0];
}

public int surfaceHeight() {
final int heightArray[] = new int[1];
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
return heightArray[0];
}

public void releaseSurface() {
if (eglSurface != EGL14.EGL_NO_SURFACE) {
EGL14.eglDestroySurface(eglDisplay, eglSurface);
eglSurface = EGL14.EGL_NO_SURFACE;
if (eglSurface != EGL10.EGL_NO_SURFACE) {
egl.eglDestroySurface(eglDisplay, eglSurface);
eglSurface = EGL10.EGL_NO_SURFACE;
}
}

private void checkIsNotReleased() {
if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
|| eglConfig == null) {
throw new RuntimeException("This object has been released");
}
@@ -172,63 +174,62 @@ public final class EglBase {
checkIsNotReleased();
releaseSurface();
detachCurrent();
EGL14.eglDestroyContext(eglDisplay, eglContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(eglDisplay);
eglContext = EGL14.EGL_NO_CONTEXT;
eglDisplay = EGL14.EGL_NO_DISPLAY;
egl.eglDestroyContext(eglDisplay, eglContext);
egl.eglTerminate(eglDisplay);
eglContext = EGL10.EGL_NO_CONTEXT;
eglDisplay = EGL10.EGL_NO_DISPLAY;
eglConfig = null;
}

public void makeCurrent() {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't make current");
}
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}

// Detach the current EGL context, so that it can be made current on another thread.
public void detachCurrent() {
if (!EGL14.eglMakeCurrent(
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
if (!egl.eglMakeCurrent(
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}

public void swapBuffers() {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
egl.eglSwapBuffers(eglDisplay, eglSurface);
}

// Return an EGLDisplay, or die trying.
private static EGLDisplay getEglDisplay() {
EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("Unable to get EGL14 display");
private EGLDisplay getEglDisplay() {
EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("Unable to get EGL10 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
throw new RuntimeException("Unable to initialize EGL14");
if (!egl.eglInitialize(eglDisplay, version)) {
throw new RuntimeException("Unable to initialize EGL10");
}
return eglDisplay;
}

// Return an EGLConfig, or die trying.
private static EGLConfig getEglConfig(EGLDisplay eglDisplay, ConfigType configType) {
private EGLConfig getEglConfig(EGLDisplay eglDisplay, ConfigType configType) {
// Always RGB888, GLES2.
int[] configAttributes = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_NONE, 0, // Allocate dummy fields for specific options.
EGL14.EGL_NONE
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE, 0, // Allocate dummy fields for specific options.
EGL10.EGL_NONE
};

// Fill in dummy fields based on configType.
@@ -236,8 +237,8 @@ public final class EglBase {
case PLAIN:
break;
case PIXEL_BUFFER:
configAttributes[configAttributes.length - 3] = EGL14.EGL_SURFACE_TYPE;
configAttributes[configAttributes.length - 2] = EGL14.EGL_PBUFFER_BIT;
configAttributes[configAttributes.length - 3] = EGL10.EGL_SURFACE_TYPE;
configAttributes[configAttributes.length - 2] = EGL10.EGL_PBUFFER_BIT;
break;
case RECORDABLE:
configAttributes[configAttributes.length - 3] = EGL_RECORDABLE_ANDROID;
@@ -249,20 +250,20 @@ public final class EglBase {

EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(
eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
if (!egl.eglChooseConfig(
eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
throw new RuntimeException("Unable to find RGB888 " + configType + " EGL config");
}
return configs[0];
}

// Return an EGLConfig, or die trying.
private static EGLContext createEglContext(
private EGLContext createEglContext(
EGLContext sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
EGLContext eglContext =
EGL14.eglCreateContext(eglDisplay, eglConfig, sharedContext, contextAttributes, 0);
if (eglContext == EGL14.EGL_NO_CONTEXT) {
egl.eglCreateContext(eglDisplay, eglConfig, sharedContext, contextAttributes);
if (eglContext == EGL10.EGL_NO_CONTEXT) {
throw new RuntimeException("Failed to create EGL context");
}
return eglContext;
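
The test files earlier in this diff drive the rewritten EglBase through exactly this path. Below is a minimal off-screen usage sketch, assuming the methods visible in the hunks above plus an EglBase.release() teardown (whose destroy/terminate body appears above); the 64x64 size is arbitrary and chosen only for illustration.

import javax.microedition.khronos.egl.EGL10;

final class OffscreenEglExample {
  static void renderOffscreen() {
    // Pixel-buffer-backed context, created the same way the tests create it.
    final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
    eglBase.createPbufferSurface(64, 64);
    eglBase.makeCurrent();
    // ... GLES20 draw calls and glReadPixels would go here ...
    eglBase.releaseSurface();
    eglBase.release();  // Destroys the context and terminates the display.
  }
}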

@@ -28,7 +28,6 @@
package org.webrtc;

import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
@@ -40,6 +39,8 @@ import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import javax.microedition.khronos.egl.EGLContext;

/**
* Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
* of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with

@@ -30,7 +30,6 @@ package org.webrtc;
import android.content.Context;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Handler;
@@ -41,6 +40,8 @@ import android.view.SurfaceView;

import org.webrtc.Logging;

import javax.microedition.khronos.egl.EGLContext;

/**
* Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
* renderFrame() is asynchronous to avoid blocking the calling thread.

@@ -31,9 +31,6 @@ import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.opengl.EGLContext;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
@@ -56,6 +53,9 @@ import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGL10;

// Android specific implementation of VideoCapturer.
// An instance of this class can be created by an application using
// VideoCapturerAndroid.create();
@@ -95,14 +95,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private final CameraEventsHandler eventsHandler;
private boolean firstFrameReported;
private final boolean isCapturingToTexture;
// |cameraGlTexture| is used with setPreviewTexture if the capturer is capturing to
// ByteBuffers.
private int cameraGlTexture;
// |cameraSurfaceTexture| is used with setPreviewTexture if the capturer is capturing to
// ByteBuffers. Must be a member, see issue webrtc:5021.
private SurfaceTexture cameraSurfaceTexture;
//|surfaceHelper| is used if the capturer is capturing to a texture. Capturing to textures require
// API level 17.
private final SurfaceTextureHelper surfaceHelper;
// The camera API can output one old frame after the camera has been switched or the resolution
// has been changed. This flag is used for dropping the first frame after camera restart.
@@ -224,7 +216,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}

public static VideoCapturerAndroid create(String name,
CameraEventsHandler eventsHandler, Object sharedEglContext) {
CameraEventsHandler eventsHandler, EGLContext sharedEglContext) {
final int cameraId = lookupDeviceName(name);
if (cameraId == -1) {
return null;
@@ -337,7 +329,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}

private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
Object sharedContext) {
EGLContext sharedContext) {
Logging.d(TAG, "VideoCapturerAndroid");
this.id = cameraId;
this.eventsHandler = eventsHandler;
@@ -345,13 +337,11 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
cameraThread.start();
cameraThreadHandler = new Handler(cameraThread.getLooper());
videoBuffers = new FramePool(cameraThread);
if (sharedContext != null) {
surfaceHelper = SurfaceTextureHelper.create((EGLContext)sharedContext, cameraThreadHandler);
isCapturingToTexture = (sharedContext != null);
surfaceHelper = SurfaceTextureHelper.create(
isCapturingToTexture ? sharedContext : EGL10.EGL_NO_CONTEXT, cameraThreadHandler);
if (isCapturingToTexture) {
surfaceHelper.setListener(this);
isCapturingToTexture = true;
} else {
surfaceHelper = null;
isCapturingToTexture = false;
}
}

@@ -397,9 +387,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
}
});
if (isCapturingToTexture) {
surfaceHelper.disconnect();
}
cameraThread.quit();
ThreadUtils.joinUninterruptibly(cameraThread);
cameraThread = null;
@@ -455,13 +443,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
Camera.getCameraInfo(id, info);
}
try {
if (isCapturingToTexture) {
camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
} else {
cameraGlTexture = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
cameraSurfaceTexture = new SurfaceTexture(cameraGlTexture);
camera.setPreviewTexture(cameraSurfaceTexture);
}
} catch (IOException e) {
Logging.e(TAG, "setPreviewTexture failed", error);
throw new RuntimeException(e);
@@ -604,14 +586,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
if (eventsHandler != null) {
eventsHandler.onCameraClosed();
}

if (cameraGlTexture != 0) {
GLES20.glDeleteTextures(1, new int[] {cameraGlTexture}, 0);
cameraGlTexture = 0;
}
if (cameraSurfaceTexture != null) {
cameraSurfaceTexture.release();
}
}

private void switchCameraOnCameraThread() {
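
With the constructor change above, texture capture is selected purely by whether a non-null javax EGLContext is passed to VideoCapturerAndroid.create(); there is no longer an API level 17 check. A hedged sketch of the two call patterns, mirroring how the updated tests call create() (the helper class and method names are illustrative only and not part of the CL):

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLContext;

final class CapturerCreationSketch {
  // Texture capture: any non-null context selects the SurfaceTextureHelper path,
  // exactly as the updated tests do with EGL10.EGL_NO_CONTEXT.
  static VideoCapturerAndroid createTextureCapturer(String deviceName, EGLContext sharedContext) {
    return VideoCapturerAndroid.create(deviceName, null,
        sharedContext != null ? sharedContext : EGL10.EGL_NO_CONTEXT);
  }

  // Passing null keeps the byte-buffer capture path (isCapturingToTexture == false above).
  static VideoCapturerAndroid createByteBufferCapturer(String deviceName) {
    return VideoCapturerAndroid.create(deviceName, null, (EGLContext) null);
  }
}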

@@ -31,14 +31,14 @@ import java.util.ArrayList;
import java.util.concurrent.CountDownLatch;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.opengles.GL10;

import android.annotation.SuppressLint;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

@@ -71,11 +71,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private final ArrayList<YuvImageRenderer> yuvImageRenderers;
// |drawer| is synchronized on |yuvImageRenderers|.
private GlRectDrawer drawer;
private static final int EGL14_SDK_VERSION =
android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
// Current SDK version.
private static final int CURRENT_SDK_VERSION =
android.os.Build.VERSION.SDK_INT;
// Render and draw threads.
private static Thread renderFrameThread;
private static Thread drawThread;
@@ -612,12 +607,10 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
// Store render EGL context.
if (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION) {
synchronized (VideoRendererGui.class) {
eglContext = EGL14.eglGetCurrentContext();
eglContext = ((EGL10) EGLContext.getEGL()).eglGetCurrentContext();
Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
}
}

synchronized (yuvImageRenderers) {
// Create drawer for YUV/OES frames.

@@ -191,7 +191,7 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
j_init_decode_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "initDecode",
"(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
"IILandroid/opengl/EGLContext;)Z");
"IILjavax/microedition/khronos/egl/EGLContext;)Z");
j_release_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
j_dequeue_input_buffer_method_ = GetMethodID(
@@ -815,7 +815,8 @@ void MediaCodecVideoDecoderFactory::SetEGLContext(
ALOGE << "error calling NewGlobalRef for EGL Context.";
render_egl_context_ = NULL;
} else {
jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
jclass j_egl_context_class =
FindClass(jni, "javax/microedition/khronos/egl/EGLContext");
if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
ALOGE << "Wrong EGL Context.";
jni->DeleteGlobalRef(render_egl_context_);

@@ -72,6 +72,7 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "org/webrtc/IceCandidate");
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
LoadClass(jni, "android/graphics/SurfaceTexture");
LoadClass(jni, "javax/microedition/khronos/egl/EGLContext");
LoadClass(jni, "org/webrtc/CameraEnumerator");
LoadClass(jni, "org/webrtc/Camera2Enumerator");
LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
@@ -87,15 +88,6 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
jclass j_egl_base_class = GetClass("org/webrtc/EglBase");
jmethodID j_is_egl14_supported_method = jni->GetStaticMethodID(
j_egl_base_class, "isEGL14Supported", "()Z");
bool is_egl14_supported = jni->CallStaticBooleanMethod(
j_egl_base_class, j_is_egl14_supported_method);
CHECK_EXCEPTION(jni);
if (is_egl14_supported) {
LoadClass(jni, "android/opengl/EGLContext");
}
#endif
LoadClass(jni, "org/webrtc/MediaSource$State");
LoadClass(jni, "org/webrtc/MediaStream");

@@ -40,13 +40,15 @@ SurfaceTextureHelper::SurfaceTextureHelper(JNIEnv* jni,
: j_surface_texture_helper_class_(
jni,
FindClass(jni, "org/webrtc/SurfaceTextureHelper")),
j_surface_texture_helper_(jni, jni->CallStaticObjectMethod(
*j_surface_texture_helper_class_,
GetStaticMethodID(
j_surface_texture_helper_(
jni,
jni->CallStaticObjectMethod(
*j_surface_texture_helper_class_,
GetStaticMethodID(jni,
*j_surface_texture_helper_class_,
"create",
"(Landroid/opengl/EGLContext;)Lorg/webrtc/SurfaceTextureHelper;"),
"(Ljavax/microedition/khronos/egl/EGLContext;)"
"Lorg/webrtc/SurfaceTextureHelper;"),
egl_shared_context)),
j_return_texture_method_(GetMethodID(jni,
*j_surface_texture_helper_class_,

@@ -33,7 +33,6 @@ import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
@@ -45,6 +44,8 @@ import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;

import javax.microedition.khronos.egl.EGLContext;

// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
// This class is an implementation detail of the Java PeerConnection API.
// MediaCodec is thread-hostile so this class must be operated on a single

@@ -11,7 +11,6 @@
package org.appspot.apprtc;

import android.content.Context;
import android.opengl.EGLContext;
import android.util.Log;

import org.appspot.apprtc.AppRTCClient.SignalingParameters;
@@ -43,6 +42,8 @@ import java.util.TimerTask;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.microedition.khronos.egl.EGLContext;

/**
* Peer connection client implementation.
*