VideoRendererGui: Move to async rendering and remove no longer needed code

BUG=webrtc:4742, webrtc:4909
R=glaznev@webrtc.org

Review URL: https://codereview.webrtc.org/1321853003 .

Cr-Commit-Position: refs/heads/master@{#9847}
This commit is contained in:
Magnus Jedvert
2015-09-03 12:40:38 +02:00
parent 4df08ff374
commit 7afc12fe91
4 changed files with 88 additions and 156 deletions

View File

@@ -86,7 +86,7 @@ public class GlRectDrawerTest extends ActivityTestCase {
final GlRectDrawer drawer = new GlRectDrawer(); final GlRectDrawer drawer = new GlRectDrawer();
final float[] texMatrix = new float[16]; final float[] texMatrix = new float[16];
Matrix.setIdentityM(texMatrix, 0); Matrix.setIdentityM(texMatrix, 0);
drawer.drawYuv(WIDTH, HEIGHT, yuvTextures, texMatrix); drawer.drawYuv(yuvTextures, texMatrix);
// Download the pixels in the pixel buffer as RGBA. // Download the pixels in the pixel buffer as RGBA.
final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4); final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);

View File

@@ -120,6 +120,51 @@ public class GlRectDrawer {
private GlShader currentShader; private GlShader currentShader;
private float[] currentTexMatrix; private float[] currentTexMatrix;
private int texMatrixLocation; private int texMatrixLocation;
// Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
// TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader that
// handles stride and compare performance with intermediate copy.
private ByteBuffer copyBuffer;
/**
* Upload |planes| into |outputYuvTextures|, taking stride into consideration. |outputYuvTextures|
* must have been generated in advance.
*/
public void uploadYuvData(
int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
// Make a first pass to see if we need a temporary copy buffer.
int copyCapacityNeeded = 0;
for (int i = 0; i < 3; ++i) {
final int planeWidth = (i == 0) ? width : width / 2;
final int planeHeight = (i == 0) ? height : height / 2;
if (strides[i] > planeWidth) {
copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidth * planeHeight);
}
}
// Allocate copy buffer if necessary.
if (copyCapacityNeeded > 0
&& (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
}
// Upload each plane.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
final int planeWidth = (i == 0) ? width : width / 2;
final int planeHeight = (i == 0) ? height : height / 2;
// GLES only accepts packed data, i.e. stride == planeWidth.
final ByteBuffer packedByteBuffer;
if (strides[i] == planeWidth) {
// Input is packed already.
packedByteBuffer = planes[i];
} else {
VideoRenderer.nativeCopyPlane(
planes[i], planeWidth, planeHeight, strides[i], copyBuffer, planeWidth);
packedByteBuffer = copyBuffer;
}
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidth, planeHeight, 0,
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
}
}
/** /**
* Draw an OES texture frame with specified texture transformation matrix. Required resources are * Draw an OES texture frame with specified texture transformation matrix. Required resources are
@@ -150,7 +195,7 @@ public class GlRectDrawer {
* Draw a YUV frame with specified texture transformation matrix. Required resources are * Draw a YUV frame with specified texture transformation matrix. Required resources are
* allocated at the first call to this function. * allocated at the first call to this function.
*/ */
public void drawYuv(int width, int height, int[] yuvTextures, float[] texMatrix) { public void drawYuv(int[] yuvTextures, float[] texMatrix) {
prepareShader(YUV_FRAGMENT_SHADER_STRING); prepareShader(YUV_FRAGMENT_SHADER_STRING);
// Bind the textures. // Bind the textures.
for (int i = 0; i < 3; ++i) { for (int i = 0; i < 3; ++i) {
@@ -219,5 +264,6 @@ public class GlRectDrawer {
shader.release(); shader.release();
} }
shaders.clear(); shaders.clear();
copyBuffer = null;
} }
} }

View File

@@ -29,7 +29,6 @@ package org.webrtc;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10; import javax.microedition.khronos.opengles.GL10;
@@ -102,14 +101,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private int[] yuvTextures = { 0, 0, 0 }; private int[] yuvTextures = { 0, 0, 0 };
private int oesTexture = 0; private int oesTexture = 0;
// Render frame queue - accessed by two threads. renderFrame() call does // Pending frame to render. Serves as a queue with size 1. |pendingFrame| is accessed by two
// an offer (writing I420Frame to render) and early-returns (recording // threads - frames are received in renderFrame() and consumed in draw(). Frames are dropped in
// a dropped frame) if that queue is full. draw() call does a peek(), // renderFrame() if the previous frame has not been rendered yet.
// copies frame to texture and then removes it from a queue using poll(). private I420Frame pendingFrame;
private final LinkedBlockingQueue<I420Frame> frameToRenderQueue; private final Object pendingFrameLock = new Object();
// Local copy of incoming video frame. Synchronized on |frameToRenderQueue|.
private I420Frame yuvFrameToRender;
private I420Frame textureFrameToRender;
// Type of video frame used for recent frame rendering. // Type of video frame used for recent frame rendering.
private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE }; private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
private RendererType rendererType; private RendererType rendererType;
@@ -129,7 +125,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private long startTimeNs = -1; private long startTimeNs = -1;
// Time in ns spent in draw() function. // Time in ns spent in draw() function.
private long drawTimeNs; private long drawTimeNs;
// Time in ns spent in renderFrame() function - including copying frame // Time in ns spent in draw() copying resources from |pendingFrame| - including uploading frame
// data to rendering planes. // data to rendering planes.
private long copyTimeNs; private long copyTimeNs;
// The allowed view area in percentage of screen size. // The allowed view area in percentage of screen size.
@@ -163,7 +159,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
this.id = id; this.id = id;
this.scalingType = scalingType; this.scalingType = scalingType;
this.mirror = mirror; this.mirror = mirror;
frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1);
layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height)); layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
updateTextureProperties = false; updateTextureProperties = false;
rotationDegree = 0; rotationDegree = 0;
@@ -171,10 +166,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private synchronized void release() { private synchronized void release() {
surface = null; surface = null;
synchronized (frameToRenderQueue) { synchronized (pendingFrameLock) {
frameToRenderQueue.clear(); if (pendingFrame != null) {
yuvFrameToRender = null; VideoRenderer.renderFrameDone(pendingFrame);
textureFrameToRender = null; pendingFrame = null;
}
} }
} }
@@ -231,52 +227,47 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
GLES20.glViewport(displayLayout.left, screenHeight - displayLayout.bottom, GLES20.glViewport(displayLayout.left, screenHeight - displayLayout.bottom,
displayLayout.width(), displayLayout.height()); displayLayout.width(), displayLayout.height());
I420Frame frameFromQueue; final boolean isNewFrame;
synchronized (frameToRenderQueue) { synchronized (pendingFrameLock) {
// Check if texture vertices/coordinates adjustment is required when // Check if texture vertices/coordinates adjustment is required when
// screen orientation changes or video frame size changes. // screen orientation changes or video frame size changes.
checkAdjustTextureCoords(); checkAdjustTextureCoords();
frameFromQueue = frameToRenderQueue.peek(); isNewFrame = (pendingFrame != null);
if (frameFromQueue != null && startTimeNs == -1) { if (isNewFrame && startTimeNs == -1) {
startTimeNs = now; startTimeNs = now;
} }
if (frameFromQueue != null) { if (isNewFrame) {
if (frameFromQueue.yuvFrame) { if (pendingFrame.yuvFrame) {
// YUV textures rendering. Upload YUV data as textures. rendererType = RendererType.RENDERER_YUV;
for (int i = 0; i < 3; ++i) { drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
int w = (i == 0) ? frameFromQueue.width : frameFromQueue.width / 2;
int h = (i == 0) ? frameFromQueue.height : frameFromQueue.height / 2;
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
frameFromQueue.yuvPlanes[i]);
}
} else { } else {
rendererType = RendererType.RENDERER_TEXTURE;
// External texture rendering. Copy texture id and update texture image to latest. // External texture rendering. Copy texture id and update texture image to latest.
// TODO(magjed): We should not make an unmanaged copy of texture id. Also, this is not // TODO(magjed): We should not make an unmanaged copy of texture id. Also, this is not
// the best place to call updateTexImage. // the best place to call updateTexImage.
oesTexture = frameFromQueue.textureId; oesTexture = pendingFrame.textureId;
if (frameFromQueue.textureObject instanceof SurfaceTexture) { if (pendingFrame.textureObject instanceof SurfaceTexture) {
SurfaceTexture surfaceTexture = SurfaceTexture surfaceTexture =
(SurfaceTexture) frameFromQueue.textureObject; (SurfaceTexture) pendingFrame.textureObject;
surfaceTexture.updateTexImage(); surfaceTexture.updateTexImage();
} }
} }
copyTimeNs += (System.nanoTime() - now);
frameToRenderQueue.poll(); VideoRenderer.renderFrameDone(pendingFrame);
pendingFrame = null;
} }
} }
if (rendererType == RendererType.RENDERER_YUV) { if (rendererType == RendererType.RENDERER_YUV) {
drawer.drawYuv(videoWidth, videoHeight, yuvTextures, texMatrix); drawer.drawYuv(yuvTextures, texMatrix);
} else { } else {
drawer.drawOes(oesTexture, texMatrix); drawer.drawOes(oesTexture, texMatrix);
} }
if (frameFromQueue != null) { if (isNewFrame) {
framesRendered++; framesRendered++;
drawTimeNs += (System.nanoTime() - now); drawTimeNs += (System.nanoTime() - now);
if ((framesRendered % 300) == 0) { if ((framesRendered % 300) == 0) {
@@ -342,25 +333,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation); rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
} }
// Frame re-allocation need to be synchronized with copying synchronized (updateTextureLock) {
// frame to textures in draw() function to avoid re-allocating
// the frame while it is being copied.
synchronized (frameToRenderQueue) {
Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " + Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
videoWidth + " x " + videoHeight + " rotation " + rotation); videoWidth + " x " + videoHeight + " rotation " + rotation);
this.videoWidth = videoWidth; this.videoWidth = videoWidth;
this.videoHeight = videoHeight; this.videoHeight = videoHeight;
rotationDegree = rotation; rotationDegree = rotation;
int[] strides = { videoWidth, videoWidth / 2, videoWidth / 2 };
// Clear rendering queue.
frameToRenderQueue.poll();
// Re-allocate / allocate the frame.
yuvFrameToRender = new I420Frame(videoWidth, videoHeight, rotationDegree,
strides, null, 0);
textureFrameToRender = new I420Frame(videoWidth, videoHeight, rotationDegree,
null, -1, 0);
updateTextureProperties = true; updateTextureProperties = true;
Log.d(TAG, " YuvImageRenderer.setSize done."); Log.d(TAG, " YuvImageRenderer.setSize done.");
} }
@@ -377,16 +356,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
Log.d(TAG, "ID: " + id + ". Reporting first rendered frame."); Log.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
rendererEvents.onFirstFrameRendered(); rendererEvents.onFirstFrameRendered();
} }
setSize(frame.width, frame.height, frame.rotationDegree);
long now = System.nanoTime();
framesReceived++; framesReceived++;
synchronized (frameToRenderQueue) { synchronized (pendingFrameLock) {
// Skip rendering of this frame if setSize() was not called.
if (yuvFrameToRender == null || textureFrameToRender == null) {
framesDropped++;
VideoRenderer.renderFrameDone(frame);
return;
}
// Check input frame parameters. // Check input frame parameters.
if (frame.yuvFrame) { if (frame.yuvFrame) {
if (frame.yuvStrides[0] < frame.width || if (frame.yuvStrides[0] < frame.width ||
@@ -397,35 +368,18 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
VideoRenderer.renderFrameDone(frame); VideoRenderer.renderFrameDone(frame);
return; return;
} }
// Check incoming frame dimensions.
if (frame.width != yuvFrameToRender.width ||
frame.height != yuvFrameToRender.height) {
throw new RuntimeException("Wrong frame size " +
frame.width + " x " + frame.height);
}
} }
if (frameToRenderQueue.size() > 0) { if (pendingFrame != null) {
// Skip rendering of this frame if previous frame was not rendered yet. // Skip rendering of this frame if previous frame was not rendered yet.
framesDropped++; framesDropped++;
VideoRenderer.renderFrameDone(frame); VideoRenderer.renderFrameDone(frame);
return; return;
} }
pendingFrame = frame;
// Create a local copy of the frame.
if (frame.yuvFrame) {
yuvFrameToRender.copyFrom(frame);
rendererType = RendererType.RENDERER_YUV;
frameToRenderQueue.offer(yuvFrameToRender);
} else {
textureFrameToRender.copyFrom(frame);
rendererType = RendererType.RENDERER_TEXTURE;
frameToRenderQueue.offer(textureFrameToRender);
} }
} setSize(frame.width, frame.height, frame.rotationDegree);
copyTimeNs += (System.nanoTime() - now);
seenFrame = true; seenFrame = true;
VideoRenderer.renderFrameDone(frame);
// Request rendering. // Request rendering.
surface.requestRender(); surface.requestRender();

View File

@@ -37,7 +37,7 @@ import java.nio.ByteBuffer;
*/ */
public class VideoRenderer { public class VideoRenderer {
/** Java version of cricket::VideoFrame. */ /** Java version of cricket::VideoFrame. Frames are only constructed from native code. */
public static class I420Frame { public static class I420Frame {
public final int width; public final int width;
public final int height; public final int height;
@@ -46,7 +46,7 @@ public class VideoRenderer {
public final boolean yuvFrame; public final boolean yuvFrame;
public Object textureObject; public Object textureObject;
public int textureId; public int textureId;
// If |nativeFramePointer| is non-zero, the memory is allocated on the C++ side. // Frame pointer in C++.
private long nativeFramePointer; private long nativeFramePointer;
// rotationDegree is the degree that the frame must be rotated clockwisely // rotationDegree is the degree that the frame must be rotated clockwisely
@@ -54,22 +54,14 @@ public class VideoRenderer {
public int rotationDegree; public int rotationDegree;
/** /**
* Construct a frame of the given dimensions with the specified planar * Construct a frame of the given dimensions with the specified planar data.
* data. If |yuvPlanes| is null, new planes of the appropriate sizes are
* allocated.
*/ */
public I420Frame( private I420Frame(
int width, int height, int rotationDegree, int width, int height, int rotationDegree,
int[] yuvStrides, ByteBuffer[] yuvPlanes, long nativeFramePointer) { int[] yuvStrides, ByteBuffer[] yuvPlanes, long nativeFramePointer) {
this.width = width; this.width = width;
this.height = height; this.height = height;
this.yuvStrides = yuvStrides; this.yuvStrides = yuvStrides;
if (yuvPlanes == null) {
yuvPlanes = new ByteBuffer[3];
yuvPlanes[0] = ByteBuffer.allocateDirect(yuvStrides[0] * height);
yuvPlanes[1] = ByteBuffer.allocateDirect(yuvStrides[1] * height / 2);
yuvPlanes[2] = ByteBuffer.allocateDirect(yuvStrides[2] * height / 2);
}
this.yuvPlanes = yuvPlanes; this.yuvPlanes = yuvPlanes;
this.yuvFrame = true; this.yuvFrame = true;
this.rotationDegree = rotationDegree; this.rotationDegree = rotationDegree;
@@ -82,7 +74,7 @@ public class VideoRenderer {
/** /**
* Construct a texture frame of the given dimensions with data in SurfaceTexture * Construct a texture frame of the given dimensions with data in SurfaceTexture
*/ */
public I420Frame( private I420Frame(
int width, int height, int rotationDegree, int width, int height, int rotationDegree,
Object textureObject, int textureId, long nativeFramePointer) { Object textureObject, int textureId, long nativeFramePointer) {
this.width = width; this.width = width;
@@ -107,66 +99,6 @@ public class VideoRenderer {
return (rotationDegree % 180 == 0) ? height : width; return (rotationDegree % 180 == 0) ? height : width;
} }
/**
* Copy the planes out of |source| into |this| and return |this|. Calling
* this with mismatched frame dimensions or frame type is a programming
* error and will likely crash.
*/
public I420Frame copyFrom(I420Frame source) {
// |nativeFramePointer| is not copied from |source|, because resources in this object are
// still allocated in Java. After copyFrom() is done, this object should not hold any
// references to |source|. This is violated for texture frames however, because |textureId|
// is copied without making a deep copy.
if (this.nativeFramePointer != 0) {
throw new RuntimeException("Trying to overwrite a frame allocated in C++");
}
if (source.yuvFrame && yuvFrame) {
if (width != source.width || height != source.height) {
throw new RuntimeException("Mismatched dimensions! Source: " +
source.toString() + ", destination: " + toString());
}
nativeCopyPlane(source.yuvPlanes[0], width, height,
source.yuvStrides[0], yuvPlanes[0], yuvStrides[0]);
nativeCopyPlane(source.yuvPlanes[1], width / 2, height / 2,
source.yuvStrides[1], yuvPlanes[1], yuvStrides[1]);
nativeCopyPlane(source.yuvPlanes[2], width / 2, height / 2,
source.yuvStrides[2], yuvPlanes[2], yuvStrides[2]);
rotationDegree = source.rotationDegree;
return this;
} else if (!source.yuvFrame && !yuvFrame) {
textureObject = source.textureObject;
textureId = source.textureId;
rotationDegree = source.rotationDegree;
return this;
} else {
throw new RuntimeException("Mismatched frame types! Source: " +
source.toString() + ", destination: " + toString());
}
}
public I420Frame copyFrom(byte[] yuvData, int rotationDegree) {
if (yuvData.length < width * height * 3 / 2) {
throw new RuntimeException("Wrong arrays size: " + yuvData.length);
}
if (!yuvFrame) {
throw new RuntimeException("Can not feed yuv data to texture frame");
}
int planeSize = width * height;
ByteBuffer[] planes = new ByteBuffer[3];
planes[0] = ByteBuffer.wrap(yuvData, 0, planeSize);
planes[1] = ByteBuffer.wrap(yuvData, planeSize, planeSize / 4);
planes[2] = ByteBuffer.wrap(yuvData, planeSize + planeSize / 4,
planeSize / 4);
for (int i = 0; i < 3; i++) {
yuvPlanes[i].position(0);
yuvPlanes[i].put(planes[i]);
yuvPlanes[i].position(0);
yuvPlanes[i].limit(yuvPlanes[i].capacity());
}
this.rotationDegree = rotationDegree;
return this;
}
@Override @Override
public String toString() { public String toString() {
return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] + return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
@@ -175,7 +107,7 @@ public class VideoRenderer {
} }
// Helper native function to do a video frame plane copying. // Helper native function to do a video frame plane copying.
private static native void nativeCopyPlane(ByteBuffer src, int width, public static native void nativeCopyPlane(ByteBuffer src, int width,
int height, int srcStride, ByteBuffer dst, int dstStride); int height, int srcStride, ByteBuffer dst, int dstStride);
/** The real meat of VideoRendererInterface. */ /** The real meat of VideoRendererInterface. */