Android rendering: Move common EGL and GL functions to separate classes

This CL does not make any functional changes. The purpose is to extract common code that is needed for texture capture and texture encoding.

This CL makes the following changes:
* Move common EGL functions from org.webrtc.MediaCodecVideoDecoder to org.webrtc.EglBase.
* Move common GL functions from org.webrtc.VideoRendererGui to org.webrtc.GlUtil and org.webrtc.GlShader.
* Remove unused call to surfaceTexture.getTransformMatrix in YuvImageRenderer.
* Add helper functions rotatedWidth()/rotatedHeight() in VideoRenderer.I420Frame.

R=glaznev@webrtc.org, hbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/47309005.

Cr-Commit-Position: refs/heads/master@{#9414}
Magnus Jedvert
2015-06-11 10:08:59 +02:00
parent f045e4da43
commit 80cf97cddd
8 changed files with 482 additions and 228 deletions

app/webrtc/java/android/org/webrtc/EglBase.java

@@ -0,0 +1,233 @@
/*
* libjingle
* Copyright 2015 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;

import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.util.Log;
import android.view.Surface;

/**
 * Holds EGL state and utility methods for handling an EGLContext, an EGLDisplay, and an EGLSurface.
 */
public final class EglBase {
  private static final String TAG = "EglBase";
  private static final int EGL14_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
  private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
  // Android-specific extension.
  private static final int EGL_RECORDABLE_ANDROID = 0x3142;

  private EGLContext eglContext;
  private ConfigType configType;
  private EGLConfig eglConfig;
  private EGLDisplay eglDisplay;
  private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;

  public static boolean isEGL14Supported() {
    Log.d(TAG, "SDK version: " + CURRENT_SDK_VERSION);
    return (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION);
  }

  // EGLConfig constructor type. Influences eglChooseConfig arguments.
  public static enum ConfigType {
    // No special parameters.
    PLAIN,
    // Configures with EGL_SURFACE_TYPE = EGL_PBUFFER_BIT.
    PIXEL_BUFFER,
    // Configures with EGL_RECORDABLE_ANDROID = 1.
    // Discourages EGL from using pixel formats that cannot efficiently be
    // converted to something usable by the video encoder.
    RECORDABLE
  }

  // Create root context without any EGLSurface or parent EGLContext. This can be used for
  // branching new contexts that share data.
  public EglBase() {
    this(EGL14.EGL_NO_CONTEXT, ConfigType.PLAIN);
  }

  // Create a new context with the specified config type, sharing data with sharedContext.
  public EglBase(EGLContext sharedContext, ConfigType configType) {
    this.configType = configType;
    eglDisplay = getEglDisplay();
    eglConfig = getEglConfig(eglDisplay, configType);
    eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
  }

  // Create EGLSurface from the Android Surface.
  public void createSurface(Surface surface) {
    checkIsNotReleased();
    if (configType == ConfigType.PIXEL_BUFFER) {
      Log.w(TAG, "This EGL context is configured for PIXEL_BUFFER, but uses regular Surface");
    }
    if (eglSurface != EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("Already has an EGLSurface");
    }
    int[] surfaceAttribs = {EGL14.EGL_NONE};
    eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
    if (eglSurface == EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("Failed to create window surface");
    }
  }

  // Create dummy 1x1 pixel buffer surface so the context can be made current.
  public void createDummyPbufferSurface() {
    checkIsNotReleased();
    if (configType != ConfigType.PIXEL_BUFFER) {
      throw new RuntimeException(
          "This EGL context is not configured to use a pixel buffer: " + configType);
    }
    if (eglSurface != EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("Already has an EGLSurface");
    }
    int[] surfaceAttribs = {EGL14.EGL_WIDTH, 1, EGL14.EGL_HEIGHT, 1, EGL14.EGL_NONE};
    eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
    if (eglSurface == EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("Failed to create pixel buffer surface");
    }
  }

  public EGLContext getContext() {
    return eglContext;
  }

  public boolean hasSurface() {
    return eglSurface != EGL14.EGL_NO_SURFACE;
  }

  public void releaseSurface() {
    if (eglSurface != EGL14.EGL_NO_SURFACE) {
      EGL14.eglDestroySurface(eglDisplay, eglSurface);
      eglSurface = EGL14.EGL_NO_SURFACE;
    }
  }

  private void checkIsNotReleased() {
    if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
        || eglConfig == null) {
      throw new RuntimeException("This object has been released");
    }
  }

  public void release() {
    checkIsNotReleased();
    releaseSurface();
    // Release our context.
    EGL14.eglMakeCurrent(
        eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
    EGL14.eglDestroyContext(eglDisplay, eglContext);
    EGL14.eglReleaseThread();
    EGL14.eglTerminate(eglDisplay);
    eglContext = EGL14.EGL_NO_CONTEXT;
    eglDisplay = EGL14.EGL_NO_DISPLAY;
    eglConfig = null;
  }

  public void makeCurrent() {
    checkIsNotReleased();
    if (eglSurface == EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("No EGLSurface - can't make current");
    }
    if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
      throw new RuntimeException("eglMakeCurrent failed");
    }
  }

  public void swapBuffers() {
    checkIsNotReleased();
    if (eglSurface == EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("No EGLSurface - can't swap buffers");
    }
    EGL14.eglSwapBuffers(eglDisplay, eglSurface);
  }

  // Return an EGLDisplay, or die trying.
  private static EGLDisplay getEglDisplay() {
    EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
    if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
      throw new RuntimeException("Unable to get EGL14 display");
    }
    int[] version = new int[2];
    if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
      throw new RuntimeException("Unable to initialize EGL14");
    }
    return eglDisplay;
  }

  // Return an EGLConfig, or die trying.
  private static EGLConfig getEglConfig(EGLDisplay eglDisplay, ConfigType configType) {
    // Always RGB888, GLES2.
    int[] configAttributes = {
      EGL14.EGL_RED_SIZE, 8,
      EGL14.EGL_GREEN_SIZE, 8,
      EGL14.EGL_BLUE_SIZE, 8,
      EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
      EGL14.EGL_NONE, 0,  // Allocate dummy fields for specific options.
      EGL14.EGL_NONE
    };

    // Fill in dummy fields based on configType.
    switch (configType) {
      case PLAIN:
        break;
      case PIXEL_BUFFER:
        configAttributes[configAttributes.length - 3] = EGL14.EGL_SURFACE_TYPE;
        configAttributes[configAttributes.length - 2] = EGL14.EGL_PBUFFER_BIT;
        break;
      case RECORDABLE:
        configAttributes[configAttributes.length - 3] = EGL_RECORDABLE_ANDROID;
        configAttributes[configAttributes.length - 2] = 1;
        break;
      default:
        throw new IllegalArgumentException();
    }

    EGLConfig[] configs = new EGLConfig[1];
    int[] numConfigs = new int[1];
    if (!EGL14.eglChooseConfig(
        eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
      throw new RuntimeException("Unable to find RGB888 " + configType + " EGL config");
    }
    return configs[0];
  }
  // Return an EGLContext, or die trying.
  private static EGLContext createEglContext(
      EGLContext sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
    int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
    EGLContext eglContext =
        EGL14.eglCreateContext(eglDisplay, eglConfig, sharedContext, contextAttributes, 0);
    if (eglContext == EGL14.EGL_NO_CONTEXT) {
      throw new RuntimeException("Failed to create EGL context");
    }
    return eglContext;
  }
}
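
For orientation, a minimal usage sketch of the off-screen path (not part of this commit; it mirrors how MediaCodecVideoDecoder drives EglBase further down, and assumes it runs on a dedicated thread with no other EGL context bound):

  // Sketch only: root pbuffer-backed context; make it current, do GL work, clean up.
  EglBase eglBase = new EglBase(EGL14.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
  eglBase.createDummyPbufferSurface();  // 1x1 surface so makeCurrent() has a target.
  eglBase.makeCurrent();
  // ... glGenTextures(), rendering, readback ...
  eglBase.release();  // Destroys the surface and context and terminates the display.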

app/webrtc/java/android/org/webrtc/GlShader.java

@@ -0,0 +1,127 @@
/*
* libjingle
* Copyright 2015 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;

import android.opengl.GLES20;
import android.util.Log;

// Helper class for handling OpenGL shaders and shader programs.
public class GlShader {
  private static final String TAG = "GlShader";

  private static int compileShader(int shaderType, String source) {
    int[] result = new int[] {
      GLES20.GL_FALSE
    };
    int shader = GLES20.glCreateShader(shaderType);
    GLES20.glShaderSource(shader, source);
    GLES20.glCompileShader(shader);
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
    if (result[0] != GLES20.GL_TRUE) {
      Log.e(TAG, "Could not compile shader " + shaderType + ":" +
          GLES20.glGetShaderInfoLog(shader));
      throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
    }
    GlUtil.checkNoGLES2Error("compileShader");
    return shader;
  }

  private int vertexShader;
  private int fragmentShader;
  private int program;

  public GlShader(String vertexSource, String fragmentSource) {
    vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
    fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
    program = GLES20.glCreateProgram();
    if (program == 0) {
      throw new RuntimeException("Could not create program");
    }
    GLES20.glAttachShader(program, vertexShader);
    GLES20.glAttachShader(program, fragmentShader);
    GLES20.glLinkProgram(program);
    int[] linkStatus = new int[] {
      GLES20.GL_FALSE
    };
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
    if (linkStatus[0] != GLES20.GL_TRUE) {
      Log.e(TAG, "Could not link program: " +
          GLES20.glGetProgramInfoLog(program));
      throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
    }
    GlUtil.checkNoGLES2Error("Creating GlShader");
  }

  public int getAttribLocation(String label) {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    int location = GLES20.glGetAttribLocation(program, label);
    if (location < 0) {
      throw new RuntimeException("Could not locate '" + label + "' in program");
    }
    return location;
  }

  public int getUniformLocation(String label) {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    int location = GLES20.glGetUniformLocation(program, label);
    if (location < 0) {
      throw new RuntimeException("Could not locate uniform '" + label + "' in program");
    }
    return location;
  }

  public void useProgram() {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    GLES20.glUseProgram(program);
    GlUtil.checkNoGLES2Error("glUseProgram");
  }

  public void release() {
    Log.d(TAG, "Deleting shader.");
    // Flag shaders for deletion (does not delete until no longer attached to a program).
    if (vertexShader != -1) {
      GLES20.glDeleteShader(vertexShader);
      vertexShader = -1;
    }
    if (fragmentShader != -1) {
      GLES20.glDeleteShader(fragmentShader);
      fragmentShader = -1;
    }
    // Delete program, automatically detaching any shaders from it.
    if (program != -1) {
      GLES20.glDeleteProgram(program);
      program = -1;
    }
  }
}
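
A brief usage sketch (illustrative only, not from this commit): vertexSource, fragmentSource and the FloatBuffer vertices are assumed to be in scope; the attribute name "in_pos" matches the vertex shader used by VideoRendererGui below:

  GlShader shader = new GlShader(vertexSource, fragmentSource);  // Compiles and links; throws on failure.
  shader.useProgram();
  int posLocation = shader.getAttribLocation("in_pos");  // Throws if the attribute is not found.
  GLES20.glEnableVertexAttribArray(posLocation);
  GLES20.glVertexAttribPointer(posLocation, 2, GLES20.GL_FLOAT, false, 0, vertices);
  GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
  GLES20.glDisableVertexAttribArray(posLocation);
  shader.release();  // Flags the shaders for deletion and deletes the program.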

app/webrtc/java/android/org/webrtc/GlUtil.java

@@ -0,0 +1,61 @@
/*
* libjingle
* Copyright 2015 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;

import android.opengl.GLES20;
import android.util.Log;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

/**
 * Some OpenGL static utility functions.
 */
public class GlUtil {
  private static final String TAG = "GlUtil";

  private GlUtil() {}

  // Assert that no OpenGL ES 2.0 error has been raised.
  public static void checkNoGLES2Error(String msg) {
    int error = GLES20.glGetError();
    if (error != GLES20.GL_NO_ERROR) {
      throw new RuntimeException(msg + ": GLES20 error: " + error);
    }
  }

  public static FloatBuffer createFloatBuffer(float[] coords) {
    // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
    ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
    bb.order(ByteOrder.nativeOrder());
    FloatBuffer fb = bb.asFloatBuffer();
    fb.put(coords);
    fb.position(0);
    return fb;
  }
}
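
Both helpers in one illustrative fragment (a sketch, assuming a GL context is current on this thread):

  // Full-screen quad in normalized device coordinates, in triangle-strip order.
  FloatBuffer textureVertices = GlUtil.createFloatBuffer(new float[] {
      -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, -1.0f
  });
  GlUtil.checkNoGLES2Error("buffer setup");  // Throws if an earlier GL call left an error pending.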

app/webrtc/java/android/org/webrtc/VideoRendererGui.java

@@ -70,8 +70,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   private int screenHeight;
   // List of yuv renderers.
   private ArrayList<YuvImageRenderer> yuvImageRenderers;
-  private int yuvProgram;
-  private int oesProgram;
+  private GlShader yuvShader;
+  private GlShader oesShader;
   // Types of video scaling:
   // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
   //    maintaining the aspect ratio (black borders may be displayed).
@@ -140,69 +140,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     yuvImageRenderers = new ArrayList<YuvImageRenderer>();
   }

-  // Poor-man's assert(): die with |msg| unless |condition| is true.
-  private static void abortUnless(boolean condition, String msg) {
-    if (!condition) {
-      throw new RuntimeException(msg);
-    }
-  }
-
-  // Assert that no OpenGL ES 2.0 error has been raised.
-  private static void checkNoGLES2Error() {
-    int error = GLES20.glGetError();
-    abortUnless(error == GLES20.GL_NO_ERROR, "GLES20 error: " + error);
-  }
-
-  // Wrap a float[] in a direct FloatBuffer using native byte order.
-  private static FloatBuffer directNativeFloatBuffer(float[] array) {
-    FloatBuffer buffer = ByteBuffer.allocateDirect(array.length * 4).order(
-        ByteOrder.nativeOrder()).asFloatBuffer();
-    buffer.put(array);
-    buffer.flip();
-    return buffer;
-  }
-
-  private int loadShader(int shaderType, String source) {
-    int[] result = new int[] {
-      GLES20.GL_FALSE
-    };
-    int shader = GLES20.glCreateShader(shaderType);
-    GLES20.glShaderSource(shader, source);
-    GLES20.glCompileShader(shader);
-    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
-    if (result[0] != GLES20.GL_TRUE) {
-      Log.e(TAG, "Could not compile shader " + shaderType + ":" +
-          GLES20.glGetShaderInfoLog(shader));
-      throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
-    }
-    checkNoGLES2Error();
-    return shader;
-  }
-
-  private int createProgram(String vertexSource, String fragmentSource) {
-    int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
-    int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
-    int program = GLES20.glCreateProgram();
-    if (program == 0) {
-      throw new RuntimeException("Could not create program");
-    }
-    GLES20.glAttachShader(program, vertexShader);
-    GLES20.glAttachShader(program, fragmentShader);
-    GLES20.glLinkProgram(program);
-    int[] linkStatus = new int[] {
-      GLES20.GL_FALSE
-    };
-    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
-    if (linkStatus[0] != GLES20.GL_TRUE) {
-      Log.e(TAG, "Could not link program: " +
-          GLES20.glGetProgramInfoLog(program));
-      throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
-    }
-    checkNoGLES2Error();
-    return program;
-  }
-
   /**
    * Class used to display stream of YUV420 frames at particular location
    * on a screen. New video frames are sent to display using renderFrame()
@@ -211,11 +148,10 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   private static class YuvImageRenderer implements VideoRenderer.Callbacks {
     private GLSurfaceView surface;
     private int id;
-    private int yuvProgram;
-    private int oesProgram;
+    private GlShader yuvShader;
+    private GlShader oesShader;
     private int[] yuvTextures = { -1, -1, -1 };
     private int oesTexture = -1;
-    private float[] stMatrix = new float[16];

     // Render frame queue - accessed by two threads. renderFrame() call does
     // an offer (writing I420Frame to render) and early-returns (recording
@@ -307,21 +243,21 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
         texRight, texTop,
         texRight, texBottom
       };
-      textureVertices = directNativeFloatBuffer(textureVeticesFloat);
+      textureVertices = GlUtil.createFloatBuffer(textureVeticesFloat);

       // Create texture UV coordinates.
       float textureCoordinatesFloat[] = new float[] {
        0, 0, 0, 1, 1, 0, 1, 1
       };
-      textureCoords = directNativeFloatBuffer(textureCoordinatesFloat);
+      textureCoords = GlUtil.createFloatBuffer(textureCoordinatesFloat);
       updateTextureProperties = false;
       rotationDegree = 0;
     }

-    private void createTextures(int yuvProgram, int oesProgram) {
+    private void createTextures(GlShader yuvShader, GlShader oesShader) {
       Log.d(TAG, "  YuvImageRenderer.createTextures " + id + " on GL thread:" +
           Thread.currentThread().getId());
-      this.yuvProgram = yuvProgram;
-      this.oesProgram = oesProgram;
+      this.yuvShader = yuvShader;
+      this.oesShader = oesShader;

       // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
       GLES20.glGenTextures(3, yuvTextures, 0);
@@ -339,7 +275,7 @@
         GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
             GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
       }
-      checkNoGLES2Error();
+      GlUtil.checkNoGLES2Error("y/u/v glGenTextures");
     }

     private void checkAdjustTextureCoords() {
@@ -410,7 +346,7 @@
           texRight, texTop,
           texRight, texBottom
         };
-        textureVertices = directNativeFloatBuffer(textureVeticesFloat);
+        textureVertices = GlUtil.createFloatBuffer(textureVeticesFloat);

         float uLeft = texOffsetU;
         float uRight = 1.0f - texOffsetU;
@@ -430,8 +366,7 @@
             rotationDegree);
         textureCoordinatesFloat = applyMirror(textureCoordinatesFloat,
             mirror);
-        textureCoords =
-            directNativeFloatBuffer(textureCoordinatesFloat);
+        textureCoords = GlUtil.createFloatBuffer(textureCoordinatesFloat);
       }
       updateTextureProperties = false;
       Log.d(TAG, "  AdjustTextureCoords done");
@@ -480,7 +415,7 @@
       }

       long now = System.nanoTime();

-      int currentProgram = 0;
+      GlShader currentShader;
       I420Frame frameFromQueue;
       synchronized (frameToRenderQueue) {
@@ -495,8 +430,8 @@

         if (rendererType == RendererType.RENDERER_YUV) {
           // YUV textures rendering.
-          GLES20.glUseProgram(yuvProgram);
-          currentProgram = yuvProgram;
+          yuvShader.useProgram();
+          currentShader = yuvShader;

           for (int i = 0; i < 3; ++i) {
             GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
@@ -511,16 +446,13 @@
                 frameFromQueue.yuvPlanes[i]);
           }
         }
-        GLES20.glUniform1i(
-            GLES20.glGetUniformLocation(yuvProgram, "y_tex"), 0);
-        GLES20.glUniform1i(
-            GLES20.glGetUniformLocation(yuvProgram, "u_tex"), 1);
-        GLES20.glUniform1i(
-            GLES20.glGetUniformLocation(yuvProgram, "v_tex"), 2);
+        GLES20.glUniform1i(yuvShader.getUniformLocation("y_tex"), 0);
+        GLES20.glUniform1i(yuvShader.getUniformLocation("u_tex"), 1);
+        GLES20.glUniform1i(yuvShader.getUniformLocation("v_tex"), 2);
       } else {
         // External texture rendering.
-        GLES20.glUseProgram(oesProgram);
-        currentProgram = oesProgram;
+        oesShader.useProgram();
+        currentShader = oesShader;

         if (frameFromQueue != null) {
           oesTexture = frameFromQueue.textureId;
@@ -528,7 +460,6 @@
           SurfaceTexture surfaceTexture =
               (SurfaceTexture) frameFromQueue.textureObject;
           surfaceTexture.updateTexImage();
-          surfaceTexture.getTransformMatrix(stMatrix);
         }
       }
       GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
@@ -540,18 +471,12 @@
         }
       }

-      int posLocation = GLES20.glGetAttribLocation(currentProgram, "in_pos");
-      if (posLocation == -1) {
-        throw new RuntimeException("Could not get attrib location for in_pos");
-      }
+      int posLocation = currentShader.getAttribLocation("in_pos");
       GLES20.glEnableVertexAttribArray(posLocation);
       GLES20.glVertexAttribPointer(
           posLocation, 2, GLES20.GL_FLOAT, false, 0, textureVertices);

-      int texLocation = GLES20.glGetAttribLocation(currentProgram, "in_tc");
-      if (texLocation == -1) {
-        throw new RuntimeException("Could not get attrib location for in_tc");
-      }
+      int texLocation = currentShader.getAttribLocation("in_tc");
       GLES20.glEnableVertexAttribArray(texLocation);
       GLES20.glVertexAttribPointer(
           texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);
@@ -561,7 +486,7 @@
       GLES20.glDisableVertexAttribArray(posLocation);
       GLES20.glDisableVertexAttribArray(texLocation);
-      checkNoGLES2Error();
+      GlUtil.checkNoGLES2Error("draw done");

       if (frameFromQueue != null) {
         framesRendered++;
@@ -770,7 +695,7 @@
     instance.surface.queueEvent(new Runnable() {
       public void run() {
         yuvImageRenderer.createTextures(
-            instance.yuvProgram, instance.oesProgram);
+            instance.yuvShader, instance.oesShader);
         yuvImageRenderer.setScreenSize(
             instance.screenWidth, instance.screenHeight);
         countDownLatch.countDown();
@@ -829,20 +754,18 @@
       Log.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
     }

-    // Create YUV and OES programs.
-    yuvProgram = createProgram(VERTEX_SHADER_STRING,
-        YUV_FRAGMENT_SHADER_STRING);
-    oesProgram = createProgram(VERTEX_SHADER_STRING,
-        OES_FRAGMENT_SHADER_STRING);
+    // Create YUV and OES shaders.
+    yuvShader = new GlShader(VERTEX_SHADER_STRING, YUV_FRAGMENT_SHADER_STRING);
+    oesShader = new GlShader(VERTEX_SHADER_STRING, OES_FRAGMENT_SHADER_STRING);

     synchronized (yuvImageRenderers) {
       // Create textures for all images.
       for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
-        yuvImageRenderer.createTextures(yuvProgram, oesProgram);
+        yuvImageRenderer.createTextures(yuvShader, oesShader);
       }
       onSurfaceCreatedCalled = true;
     }
-    checkNoGLES2Error();
+    GlUtil.checkNoGLES2Error("onSurfaceCreated done");
     GLES20.glClearColor(0.15f, 0.15f, 0.15f, 1.0f);

     // Fire EGL context ready event.

app/webrtc/java/jni/classreferenceholder.cc

@@ -73,17 +73,18 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
   LoadClass(jni, "android/graphics/SurfaceTexture");
   LoadClass(jni, "org/webrtc/VideoCapturerAndroid");
   LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
+  LoadClass(jni, "org/webrtc/EglBase");
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
-  jclass j_decoder_class = GetClass("org/webrtc/MediaCodecVideoDecoder");
+  jclass j_egl_base_class = GetClass("org/webrtc/EglBase");
   jmethodID j_is_egl14_supported_method = jni->GetStaticMethodID(
-      j_decoder_class, "isEGL14Supported", "()Z");
+      j_egl_base_class, "isEGL14Supported", "()Z");
   bool is_egl14_supported = jni->CallStaticBooleanMethod(
-      j_decoder_class, j_is_egl14_supported_method);
+      j_egl_base_class, j_is_egl14_supported_method);
   CHECK_EXCEPTION(jni);
   if (is_egl14_supported) {
     LoadClass(jni, "android/opengl/EGLContext");

app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java

@@ -34,10 +34,7 @@ import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
 import android.opengl.EGL14;
-import android.opengl.EGLConfig;
 import android.opengl.EGLContext;
-import android.opengl.EGLDisplay;
-import android.opengl.EGLSurface;
 import android.opengl.GLES11Ext;
 import android.opengl.GLES20;
 import android.os.Build;
@@ -101,15 +98,7 @@ public class MediaCodecVideoDecoder {
   private int textureID = -1;
   private SurfaceTexture surfaceTexture = null;
   private Surface surface = null;
-  private float[] stMatrix = new float[16];
-  private EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY;
-  private EGLContext eglContext = EGL14.EGL_NO_CONTEXT;
-  private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
-
-  private static final int EGL14_SDK_VERSION =
-      android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
-  private static final int CURRENT_SDK_VERSION =
-      android.os.Build.VERSION.SDK_INT;
+  private EglBase eglBase;

   private MediaCodecVideoDecoder() { }
@@ -177,11 +166,6 @@ public class MediaCodecVideoDecoder {
     return null;  // No HW decoder.
   }

-  private static boolean isEGL14Supported() {
-    Log.d(TAG, "SDK version: " + CURRENT_SDK_VERSION);
-    return (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION);
-  }
-
   public static boolean isVp8HwSupported() {
     return findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
   }
@@ -198,100 +182,6 @@ public class MediaCodecVideoDecoder {
     }
   }

-  private void checkEglError(String msg) {
-    int error;
-    if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
-      Log.e(TAG, msg + ": EGL Error: 0x" + Integer.toHexString(error));
-      throw new RuntimeException(
-          msg + ": EGL error: 0x" + Integer.toHexString(error));
-    }
-  }
-
-  private void checkGlError(String msg) {
-    int error;
-    if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
-      Log.e(TAG, msg + ": GL Error: 0x" + Integer.toHexString(error));
-      throw new RuntimeException(
-          msg + ": GL Error: 0x " + Integer.toHexString(error));
-    }
-  }
-
-  private void eglSetup(EGLContext sharedContext, int width, int height) {
-    Log.d(TAG, "EGL setup");
-    if (sharedContext == null) {
-      sharedContext = EGL14.EGL_NO_CONTEXT;
-    }
-    eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
-    if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
-      throw new RuntimeException("Unable to get EGL14 display");
-    }
-    int[] version = new int[2];
-    if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
-      throw new RuntimeException("Unable to initialize EGL14");
-    }
-    // Configure EGL for pbuffer and OpenGL ES 2.0.
-    int[] attribList = {
-      EGL14.EGL_RED_SIZE, 8,
-      EGL14.EGL_GREEN_SIZE, 8,
-      EGL14.EGL_BLUE_SIZE, 8,
-      EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
-      EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
-      EGL14.EGL_NONE
-    };
-    EGLConfig[] configs = new EGLConfig[1];
-    int[] numConfigs = new int[1];
-    if (!EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0,
-        configs.length, numConfigs, 0)) {
-      throw new RuntimeException("Unable to find RGB888 EGL config");
-    }
-    // Configure context for OpenGL ES 2.0.
-    int[] attrib_list = {
-      EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
-      EGL14.EGL_NONE
-    };
-    eglContext = EGL14.eglCreateContext(eglDisplay, configs[0], sharedContext,
-        attrib_list, 0);
-    checkEglError("eglCreateContext");
-    if (eglContext == null) {
-      throw new RuntimeException("Null EGL context");
-    }
-    // Create a pbuffer surface.
-    int[] surfaceAttribs = {
-      EGL14.EGL_WIDTH, width,
-      EGL14.EGL_HEIGHT, height,
-      EGL14.EGL_NONE
-    };
-    eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, configs[0],
-        surfaceAttribs, 0);
-    checkEglError("eglCreatePbufferSurface");
-    if (eglSurface == null) {
-      throw new RuntimeException("EGL surface was null");
-    }
-  }
-
-  private void eglRelease() {
-    Log.d(TAG, "EGL release");
-    if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
-      EGL14.eglDestroySurface(eglDisplay, eglSurface);
-      EGL14.eglDestroyContext(eglDisplay, eglContext);
-      EGL14.eglReleaseThread();
-      EGL14.eglTerminate(eglDisplay);
-    }
-    eglDisplay = EGL14.EGL_NO_DISPLAY;
-    eglContext = EGL14.EGL_NO_CONTEXT;
-    eglSurface = EGL14.EGL_NO_SURFACE;
-  }
-
-  private void makeCurrent() {
-    if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
-      throw new RuntimeException("eglMakeCurrent failed");
-    }
-  }
-
   private boolean initDecode(
       VideoCodecType type, int width, int height, boolean useSwCodec,
       boolean useSurface, EGLContext sharedContext) {
@@ -336,16 +226,17 @@ public class MediaCodecVideoDecoder {
       if (useSurface) {
         // Create shared EGL context.
-        eglSetup(sharedContext, width, height);
-        makeCurrent();
+        eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
+        eglBase.createDummyPbufferSurface();
+        eglBase.makeCurrent();

         // Create output surface
         int[] textures = new int[1];
         GLES20.glGenTextures(1, textures, 0);
-        checkGlError("glGenTextures");
+        GlUtil.checkNoGLES2Error("glGenTextures");
         textureID = textures[0];
         GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
-        checkGlError("glBindTexture mTextureID");
+        GlUtil.checkNoGLES2Error("glBindTexture mTextureID");

         GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
             GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
@@ -355,7 +246,7 @@ public class MediaCodecVideoDecoder {
             GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
         GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
             GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
-        checkGlError("glTexParameter");
+        GlUtil.checkNoGLES2Error("glTexParameter");
         Log.d(TAG, "Video decoder TextureID = " + textureID);
         surfaceTexture = new SurfaceTexture(textureID);
         surface = new Surface(surfaceTexture);
@@ -404,9 +295,10 @@ public class MediaCodecVideoDecoder {
         textures[0] = textureID;
         Log.d(TAG, "Delete video decoder TextureID " + textureID);
         GLES20.glDeleteTextures(1, textures, 0);
-        checkGlError("glDeleteTextures");
+        GlUtil.checkNoGLES2Error("glDeleteTextures");
       }
-      eglRelease();
+      eglBase.release();
+      eglBase = null;
     }
   }

app/webrtc/java/src/org/webrtc/VideoRenderer.java

@@ -71,6 +71,9 @@ public class VideoRenderer {
       this.yuvPlanes = yuvPlanes;
       this.yuvFrame = true;
       this.rotationDegree = rotationDegree;
+      if (rotationDegree % 90 != 0) {
+        throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
+      }
     }

     /**
@@ -87,6 +90,17 @@ public class VideoRenderer {
       this.textureId = textureId;
       this.yuvFrame = false;
       this.rotationDegree = rotationDegree;
+      if (rotationDegree % 90 != 0) {
+        throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
+      }
+    }
+
+    public int rotatedWidth() {
+      return (rotationDegree % 180 == 0) ? width : height;
+    }
+
+    public int rotatedHeight() {
+      return (rotationDegree % 180 == 0) ? height : width;
     }

     /**
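
To make the new helpers' contract concrete, a hypothetical example: for a 640x480 frame delivered with rotationDegree == 90,

  frame.rotatedWidth();   // 480: rotation is not a multiple of 180, so width and height swap.
  frame.rotatedHeight();  // 640.

so a renderer can size its viewport for the upright image without special-casing rotation.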

libjingle.gyp

@@ -140,6 +140,9 @@
       # included here, or better yet, build a proper .jar in webrtc
       # and include it here.
       'android_java_files': [
+        'app/webrtc/java/android/org/webrtc/EglBase.java',
+        'app/webrtc/java/android/org/webrtc/GlShader.java',
+        'app/webrtc/java/android/org/webrtc/GlUtil.java',
         'app/webrtc/java/android/org/webrtc/VideoRendererGui.java',
         'app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java',
         'app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java',