Android YuvConverter: Use OpenGL Framebuffer instead of EGL pixel buffer

This CL changes YuvConverter to use an OpenGL Framebuffer as the
rendering target instead of an EGL pixel buffer surface. The purpose is
to reduce the number of EGL contexts and to be able to use YuvConverter
from EglRenderer without having to detach the EGL surface.

BUG=webrtc:6470

Review-Url: https://codereview.webrtc.org/2436653003
Cr-Commit-Position: refs/heads/master@{#14699}
This commit is contained in:
magjed
2016-10-20 03:19:16 -07:00
committed by Commit bot
parent 9ab8a1884d
commit 1cb48232ac
3 changed files with 99 additions and 58 deletions

View File

@ -134,18 +134,6 @@ class SurfaceTextureHelper {
}); });
} }
private YuvConverter getYuvConverter() {
// yuvConverter is assigned once
if (yuvConverter != null)
return yuvConverter;
synchronized (this) {
if (yuvConverter == null)
yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
return yuvConverter;
}
}
/** /**
* Start to stream textures to the given |listener|. If you need to change listener, you need to * Start to stream textures to the given |listener|. If you need to change listener, you need to
* call stopListening() first. * call stopListening() first.
@ -231,12 +219,21 @@ class SurfaceTextureHelper {
}); });
} }
public void textureToYUV( public void textureToYUV(final ByteBuffer buf, final int width, final int height,
ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) { final int stride, final int textureId, final float[] transformMatrix) {
if (textureId != oesTextureId) if (textureId != oesTextureId) {
throw new IllegalStateException("textureToByteBuffer called with unexpected textureId"); throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
}
getYuvConverter().convert(buf, width, height, stride, textureId, transformMatrix); ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
@Override
public void run() {
if (yuvConverter == null) {
yuvConverter = new YuvConverter();
}
yuvConverter.convert(buf, width, height, stride, textureId, transformMatrix);
}
});
} }
private void updateTexImage() { private void updateTexImage() {
@ -275,8 +272,7 @@ class SurfaceTextureHelper {
if (isTextureInUse || !isQuitting) { if (isTextureInUse || !isQuitting) {
throw new IllegalStateException("Unexpected release."); throw new IllegalStateException("Unexpected release.");
} }
synchronized (this) { if (yuvConverter != null) {
if (yuvConverter != null)
yuvConverter.release(); yuvConverter.release();
} }
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0); GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);

View File

@ -15,7 +15,6 @@ import android.os.HandlerThread;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.util.concurrent.CountDownLatch;
/** /**
* Can be used to save the video frames to file. * Can be used to save the video frames to file.
@ -23,7 +22,6 @@ import java.util.concurrent.CountDownLatch;
public class VideoFileRenderer implements VideoRenderer.Callbacks { public class VideoFileRenderer implements VideoRenderer.Callbacks {
private static final String TAG = "VideoFileRenderer"; private static final String TAG = "VideoFileRenderer";
private final YuvConverter yuvConverter;
private final HandlerThread renderThread; private final HandlerThread renderThread;
private final Object handlerLock = new Object(); private final Object handlerLock = new Object();
private final Handler renderThreadHandler; private final Handler renderThreadHandler;
@ -32,13 +30,14 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
private final int outputFileHeight; private final int outputFileHeight;
private final int outputFrameSize; private final int outputFrameSize;
private final ByteBuffer outputFrameBuffer; private final ByteBuffer outputFrameBuffer;
private EglBase eglBase;
private YuvConverter yuvConverter;
public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight, public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
EglBase.Context sharedContext) throws IOException { final EglBase.Context sharedContext) throws IOException {
if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) { if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
throw new IllegalArgumentException("Does not support uneven width or height"); throw new IllegalArgumentException("Does not support uneven width or height");
} }
yuvConverter = new YuvConverter(sharedContext);
this.outputFileWidth = outputFileWidth; this.outputFileWidth = outputFileWidth;
this.outputFileHeight = outputFileHeight; this.outputFileHeight = outputFileHeight;
@ -54,6 +53,16 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
renderThread = new HandlerThread(TAG); renderThread = new HandlerThread(TAG);
renderThread.start(); renderThread.start();
renderThreadHandler = new Handler(renderThread.getLooper()); renderThreadHandler = new Handler(renderThread.getLooper());
ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, new Runnable() {
@Override
public void run() {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
yuvConverter = new YuvConverter();
}
});
} }
@Override @Override
@ -113,8 +122,7 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
} }
public void release() { public void release() {
final CountDownLatch cleanupBarrier = new CountDownLatch(1); ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, new Runnable() {
renderThreadHandler.post(new Runnable() {
@Override @Override
public void run() { public void run() {
try { try {
@ -122,11 +130,11 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks {
} catch (IOException e) { } catch (IOException e) {
Logging.d(TAG, "Error closing output video file"); Logging.d(TAG, "Error closing output video file");
} }
cleanupBarrier.countDown(); yuvConverter.release();
eglBase.release();
renderThread.quit();
} }
}); });
ThreadUtils.awaitUninterruptibly(cleanupBarrier);
renderThread.quit();
} }
public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU, public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU,

View File

@ -16,13 +16,10 @@ import java.nio.ByteBuffer;
import java.nio.FloatBuffer; import java.nio.FloatBuffer;
/** /**
* Class for converting OES textures to a YUV ByteBuffer. * Class for converting OES textures to a YUV ByteBuffer. It should be constructed on a thread with
* an active EGL context, and only be used from that thread.
*/ */
class YuvConverter { class YuvConverter {
private final EglBase eglBase;
private final GlShader shader;
private boolean released = false;
// Vertex coordinates in Normalized Device Coordinates, i.e. // Vertex coordinates in Normalized Device Coordinates, i.e.
// (-1, -1) is bottom-left and (1, 1) is top-right. // (-1, -1) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] { private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
@ -83,14 +80,40 @@ class YuvConverter {
+ "}\n"; + "}\n";
// clang-format on // clang-format on
private int texMatrixLoc; private final int frameBufferId;
private int xUnitLoc; private final int frameTextureId;
private int coeffsLoc; private final GlShader shader;
private final int texMatrixLoc;
private final int xUnitLoc;
private final int coeffsLoc;
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
private int frameBufferWidth;
private int frameBufferHeight;
private boolean released = false;
public YuvConverter(EglBase.Context sharedContext) { /**
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER); * This class should be constructed on a thread that has an active EGL context.
eglBase.createDummyPbufferSurface(); */
eglBase.makeCurrent(); public YuvConverter() {
threadChecker.checkIsOnValidThread();
frameTextureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
this.frameBufferWidth = 0;
this.frameBufferHeight = 0;
// Create framebuffer object and bind it.
final int frameBuffers[] = new int[1];
GLES20.glGenFramebuffers(1, frameBuffers, 0);
frameBufferId = frameBuffers[0];
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
GlUtil.checkNoGLES2Error("Generate framebuffer");
// Attach the texture to the framebuffer as color attachment.
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D, frameTextureId, 0);
GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER); shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
shader.useProgram(); shader.useProgram();
@ -104,11 +127,11 @@ class YuvConverter {
// If the width is not a multiple of 4 pixels, the texture // If the width is not a multiple of 4 pixels, the texture
// will be scaled up slightly and clipped at the right border. // will be scaled up slightly and clipped at the right border.
shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE); shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
eglBase.detachCurrent();
} }
synchronized public void convert( public void convert(ByteBuffer buf, int width, int height, int stride, int srcTextureId,
ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) { float[] transformMatrix) {
threadChecker.checkIsOnValidThread();
if (released) { if (released) {
throw new IllegalStateException("YuvConverter.convert called on released object"); throw new IllegalStateException("YuvConverter.convert called on released object");
} }
@ -163,20 +186,28 @@ class YuvConverter {
transformMatrix = transformMatrix =
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix()); RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix());
// Create new pBuffferSurface with the correct size if needed. // Bind our framebuffer.
if (eglBase.hasSurface()) { GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
if (eglBase.surfaceWidth() != stride / 4 || eglBase.surfaceHeight() != total_height) { GlUtil.checkNoGLES2Error("glBindFramebuffer");
eglBase.releaseSurface();
eglBase.createPbufferSurface(stride / 4, total_height);
}
} else {
eglBase.createPbufferSurface(stride / 4, total_height);
}
eglBase.makeCurrent(); if (frameBufferWidth != stride / 4 || frameBufferHeight != total_height) {
frameBufferWidth = stride / 4;
frameBufferHeight = total_height;
// (Re)-Allocate texture.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameTextureId);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, frameBufferWidth,
frameBufferHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
// Check that the framebuffer is in a good state.
final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
throw new IllegalStateException("Framebuffer not complete, status: " + status);
}
}
GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId); GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, srcTextureId);
GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0); GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
// Draw Y // Draw Y
@ -203,20 +234,26 @@ class YuvConverter {
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glReadPixels( GLES20.glReadPixels(
0, 0, stride / 4, total_height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf); 0, 0, frameBufferWidth, frameBufferHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
GlUtil.checkNoGLES2Error("YuvConverter.convert"); GlUtil.checkNoGLES2Error("YuvConverter.convert");
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
// Unbind texture. Reportedly needed on some devices to get // Unbind texture. Reportedly needed on some devices to get
// the texture updated from the camera. // the texture updated from the camera.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
eglBase.detachCurrent();
} }
synchronized public void release() { public void release() {
threadChecker.checkIsOnValidThread();
released = true; released = true;
eglBase.makeCurrent();
shader.release(); shader.release();
eglBase.release(); GLES20.glDeleteTextures(1, new int[] {frameTextureId}, 0);
GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
frameBufferWidth = 0;
frameBufferHeight = 0;
} }
} }